summaryrefslogtreecommitdiffstats
path: root/src/tools/rust-analyzer/crates
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-06-19 09:26:03 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-06-19 09:26:03 +0000
commit9918693037dce8aa4bb6f08741b6812923486c18 (patch)
tree21d2b40bec7e6a7ea664acee056eb3d08e15a1cf /src/tools/rust-analyzer/crates
parentReleasing progress-linux version 1.75.0+dfsg1-5~progress7.99u1. (diff)
downloadrustc-9918693037dce8aa4bb6f08741b6812923486c18.tar.xz
rustc-9918693037dce8aa4bb6f08741b6812923486c18.zip
Merging upstream version 1.76.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates')
-rw-r--r--src/tools/rust-analyzer/crates/base-db/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/fixture.rs80
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/input.rs229
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/lib.rs16
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/span.rs208
-rw-r--r--src/tools/rust-analyzer/crates/cfg/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/lib.rs9
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/tests.rs34
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/src/lib.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/Cargo.toml23
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs36
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs66
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs115
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data.rs35
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expander.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/find_path.rs178
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/generics.rs33
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs30
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/import_map.rs300
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs96
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs168
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs22
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lib.rs96
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lower.rs21
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs160
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs156
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs174
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs59
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs113
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs11
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs241
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs70
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/resolver.rs19
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/test_db.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/visibility.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/Cargo.toml6
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs65
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs195
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs39
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs460
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs246
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs716
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs189
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/files.rs375
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs376
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs423
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs1027
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs85
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/name.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/quote.rs170
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/span.rs124
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/Cargo.toml29
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs28
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs14
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs157
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs54
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/display.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer.rs37
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs33
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs101
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs42
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs64
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout.rs138
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs30
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs234
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs134
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir.rs37
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs88
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs77
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests.rs14
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs72
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs62
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs84
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/traits.rs24
-rw-r--r--src/tools/rust-analyzer/crates/hir/Cargo.toml7
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/attrs.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/db.rs23
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/diagnostics.rs60
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/display.rs61
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs455
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs612
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs175
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/symbols.rs63
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs31
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs83
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs140
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs334
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs261
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs1675
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs76
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs889
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs241
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs35
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs161
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs31
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs216
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs13
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs202
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs58
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs74
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs33
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs172
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs52
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/lib.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs136
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs181
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs33
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs30
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/config.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/item.rs34
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/lib.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render.rs522
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs87
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests.rs21
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs252
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs126
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs33
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs42
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs270
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs486
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/Cargo.toml14
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/defs.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs8415
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/helpers.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs268
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs28
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/lib.rs13
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs49
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rename.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/search.rs125
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/source_change.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs45
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt105
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt437
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs173
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml6
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs80
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs34
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs22
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs49
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs23
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs82
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs22
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs129
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs129
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs106
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs79
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs98
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs55
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs111
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs59
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs67
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/Cargo.toml7
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/annotations.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/expand_macro.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/extend_selection.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_definition.rs60
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs92
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/highlight_related.rs79
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover.rs81
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/render.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/tests.rs227
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs54
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs218
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/interpret_function.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/lib.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/moniker.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/navigation_target.rs620
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/parent_module.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/references.rs129
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs93
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/runnables.rs100
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/signature_help.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/static_index.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/status.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs25
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs20
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html14
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html48
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs47
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_hir.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_mir.rs6
-rw-r--r--src/tools/rust-analyzer/crates/intern/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/intern/src/lib.rs14
-rw-r--r--src/tools/rust-analyzer/crates/limit/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/load-cargo/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/load-cargo/src/lib.rs38
-rw-r--r--src/tools/rust-analyzer/crates/mbe/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/benchmark.rs62
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander.rs48
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs175
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs338
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/lib.rs159
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/parser.rs68
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs1022
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs15
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs4
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/token_map.rs156
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs48
-rw-r--r--src/tools/rust-analyzer/crates/parser/Cargo.toml5
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/event.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar.rs10
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs95
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/params.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lexed_str.rs95
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lib.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/shortcuts.rs36
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rast86
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rs44
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rs14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rs14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rast92
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rs47
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rast28
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rs14
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast41
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs1
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast66
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast30
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs1
-rw-r--r--src/tools/rust-analyzer/crates/paths/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml10
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs41
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs121
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs196
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs31
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs4
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs12
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs46
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs85
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs49
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs64
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs98
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs26
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/profile/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs4
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/project_json.rs3
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/tests.rs56
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/workspace.rs67
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_A.json140
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_B.json66
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt7
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt7
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt7
-rw-r--r--src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt16
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml43
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs19
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs4
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs1
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs17
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs2
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs20
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs236
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs42
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs173
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs69
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs7
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs8
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs50
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs61
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs43
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs50
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs28
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs1
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs6
-rw-r--r--src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml20
-rw-r--r--src/tools/rust-analyzer/crates/rustc-dependencies/src/lib.rs48
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/stdx/Cargo.toml5
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/anymap.rs379
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/lib.rs73
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/macros.rs7
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/process.rs2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/Cargo.toml14
-rw-r--r--src/tools/rust-analyzer/crates/syntax/rust.ungram2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs191
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs93
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/make.rs42
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs22
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs8
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs3
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/lib.rs87
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs12
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ptr.rs26
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests.rs6
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/token_text.rs2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/utils.rs42
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/validation.rs2
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/minicore.rs91
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/toolchain/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/tt/Cargo.toml1
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/lib.rs264
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml3
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/vfs/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/lib.rs23
423 files changed, 28675 insertions, 9339 deletions
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
index 171c113a9..393ffe155 100644
--- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -12,12 +12,10 @@ rust-version.workspace = true
doctest = false
[dependencies]
-salsa = "0.17.0-pre.2"
-rustc-hash = "1.1.0"
-
-triomphe.workspace = true
-
la-arena.workspace = true
+rust-analyzer-salsa.workspace = true
+rustc-hash.workspace = true
+triomphe.workspace = true
# local deps
cfg.workspace = true
diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
index 3f5ccb621..bfdd21555 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
@@ -8,14 +8,15 @@ use test_utils::{
ESCAPED_CURSOR_MARKER,
};
use triomphe::Arc;
-use tt::token_id::{Leaf, Subtree, TokenTree};
+use tt::{Leaf, Subtree, TokenTree};
use vfs::{file_set::FileSet, VfsPath};
use crate::{
input::{CrateName, CrateOrigin, LangCrateOrigin},
- Change, CrateDisplayName, CrateGraph, CrateId, Dependency, Edition, Env, FileId, FilePosition,
- FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacros, ReleaseChannel,
- SourceDatabaseExt, SourceRoot, SourceRootId,
+ span::SpanData,
+ Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env,
+ FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
+ ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId,
};
pub const WORKSPACE: SourceRootId = SourceRootId(0);
@@ -134,7 +135,7 @@ impl ChangeFixture {
let mut file_set = FileSet::default();
let mut current_source_root_kind = SourceRootKind::Local;
- let mut file_id = FileId(0);
+ let mut file_id = FileId::from_raw(0);
let mut roots = Vec::new();
let mut file_position = None;
@@ -209,7 +210,7 @@ impl ChangeFixture {
let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path);
files.push(file_id);
- file_id.0 += 1;
+ file_id = FileId::from_raw(file_id.index() + 1);
}
if crates.is_empty() {
@@ -237,7 +238,12 @@ impl ChangeFixture {
crate_graph
.add_dep(
from_id,
- Dependency::with_prelude(CrateName::new(&to).unwrap(), to_id, prelude),
+ Dependency::with_prelude(
+ CrateName::new(&to).unwrap(),
+ to_id,
+ prelude,
+ DependencyKind::Normal,
+ ),
)
.unwrap();
}
@@ -249,7 +255,7 @@ impl ChangeFixture {
if let Some(mini_core) = mini_core {
let core_file = file_id;
- file_id.0 += 1;
+ file_id = FileId::from_raw(file_id.index() + 1);
let mut fs = FileSet::default();
fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
@@ -275,7 +281,14 @@ impl ChangeFixture {
for krate in all_crates {
crate_graph
- .add_dep(krate, Dependency::new(CrateName::new("core").unwrap(), core_crate))
+ .add_dep(
+ krate,
+ Dependency::new(
+ CrateName::new("core").unwrap(),
+ core_crate,
+ DependencyKind::Normal,
+ ),
+ )
.unwrap();
}
}
@@ -283,7 +296,6 @@ impl ChangeFixture {
let mut proc_macros = ProcMacros::default();
if !proc_macro_names.is_empty() {
let proc_lib_file = file_id;
- file_id.0 += 1;
proc_macro_defs.extend(default_test_proc_macros());
let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macro_defs);
@@ -317,7 +329,11 @@ impl ChangeFixture {
crate_graph
.add_dep(
krate,
- Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate),
+ Dependency::new(
+ CrateName::new("proc_macros").unwrap(),
+ proc_macros_crate,
+ DependencyKind::Normal,
+ ),
)
.unwrap();
}
@@ -523,10 +539,13 @@ struct IdentityProcMacroExpander;
impl ProcMacroExpander for IdentityProcMacroExpander {
fn expand(
&self,
- subtree: &Subtree,
- _: Option<&Subtree>,
+ subtree: &Subtree<SpanData>,
+ _: Option<&Subtree<SpanData>>,
_: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError> {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@@ -537,10 +556,13 @@ struct AttributeInputReplaceProcMacroExpander;
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
fn expand(
&self,
- _: &Subtree,
- attrs: Option<&Subtree>,
+ _: &Subtree<SpanData>,
+ attrs: Option<&Subtree<SpanData>>,
_: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError> {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
attrs
.cloned()
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
@@ -552,11 +574,14 @@ struct MirrorProcMacroExpander;
impl ProcMacroExpander for MirrorProcMacroExpander {
fn expand(
&self,
- input: &Subtree,
- _: Option<&Subtree>,
+ input: &Subtree<SpanData>,
+ _: Option<&Subtree<SpanData>>,
_: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError> {
- fn traverse(input: &Subtree) -> Subtree {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
+ fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let mut token_trees = vec![];
for tt in input.token_trees.iter().rev() {
let tt = match tt {
@@ -579,13 +604,16 @@ struct ShortenProcMacroExpander;
impl ProcMacroExpander for ShortenProcMacroExpander {
fn expand(
&self,
- input: &Subtree,
- _: Option<&Subtree>,
+ input: &Subtree<SpanData>,
+ _: Option<&Subtree<SpanData>>,
_: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError> {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
return Ok(traverse(input));
- fn traverse(input: &Subtree) -> Subtree {
+ fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let token_trees = input
.token_trees
.iter()
@@ -597,7 +625,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
Subtree { delimiter: input.delimiter, token_trees }
}
- fn modify_leaf(leaf: &Leaf) -> Leaf {
+ fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> {
let mut leaf = leaf.clone();
match &mut leaf {
Leaf::Literal(it) => {
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index b75c7079b..c2472363a 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -13,9 +13,10 @@ use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use triomphe::Arc;
-use tt::token_id::Subtree;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
+use crate::span::SpanData;
+
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been build yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
@@ -155,6 +156,10 @@ impl CrateOrigin {
pub fn is_local(&self) -> bool {
matches!(self, CrateOrigin::Local { .. })
}
+
+ pub fn is_lib(&self) -> bool {
+ matches!(self, CrateOrigin::Library { .. })
+ }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -238,6 +243,9 @@ impl CrateDisplayName {
}
}
+// FIXME: These should not be defined in here? Why does base db know about proc-macros
+// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
@@ -251,12 +259,16 @@ pub enum ProcMacroKind {
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
- subtree: &Subtree,
- attrs: Option<&Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
- ) -> Result<Subtree, ProcMacroExpansionError>;
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
+ ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
}
+#[derive(Debug)]
pub enum ProcMacroExpansionError {
Panic(String),
/// Things like "proc macro server was killed by OOM".
@@ -318,11 +330,69 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
- // FIXME: These things should not be per crate! These are more per workspace crate graph level things
+ // FIXME: These things should not be per crate! These are more per workspace crate graph level
+ // things. This info does need to be somewhat present though as to prevent deduplication from
+ // happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
}
+impl CrateData {
+ /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value.
+ pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool {
+ // This method has some obscure bits. These are mostly there to be compliant with
+ // some patches. References to the patches are given.
+ if self.root_file_id != other.root_file_id {
+ return false;
+ }
+
+ if self.display_name != other.display_name {
+ return false;
+ }
+
+ if self.is_proc_macro != other.is_proc_macro {
+ return false;
+ }
+
+ if self.edition != other.edition {
+ return false;
+ }
+
+ if self.version != other.version {
+ return false;
+ }
+
+ let mut opts = self.cfg_options.difference(&other.cfg_options);
+ if let Some(it) = opts.next() {
+ // Don't care if rust_analyzer CfgAtom is the only cfg in the difference set of self's and other's cfgs.
+ // https://github.com/rust-lang/rust-analyzer/blob/0840038f02daec6ba3238f05d8caa037d28701a0/crates/project-model/src/workspace.rs#L894
+ if it.to_string() != "rust_analyzer" {
+ return false;
+ }
+
+ if let Some(_) = opts.next() {
+ return false;
+ }
+ }
+
+ if self.env != other.env {
+ return false;
+ }
+
+ let slf_deps = self.dependencies.iter();
+ let other_deps = other.dependencies.iter();
+
+ if ignore_dev_deps {
+ return slf_deps
+ .clone()
+ .filter(|it| it.kind != DependencyKind::Dev)
+ .eq(other_deps.clone().filter(|it| it.kind != DependencyKind::Dev));
+ }
+
+ slf_deps.eq(other_deps)
+ }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Edition {
Edition2015,
@@ -350,26 +420,43 @@ impl Env {
}
}
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum DependencyKind {
+ Normal,
+ Dev,
+ Build,
+}
+
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Dependency {
pub crate_id: CrateId,
pub name: CrateName,
+ kind: DependencyKind,
prelude: bool,
}
impl Dependency {
- pub fn new(name: CrateName, crate_id: CrateId) -> Self {
- Self { name, crate_id, prelude: true }
+ pub fn new(name: CrateName, crate_id: CrateId, kind: DependencyKind) -> Self {
+ Self { name, crate_id, prelude: true, kind }
}
- pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self {
- Self { name, crate_id, prelude }
+ pub fn with_prelude(
+ name: CrateName,
+ crate_id: CrateId,
+ prelude: bool,
+ kind: DependencyKind,
+ ) -> Self {
+ Self { name, crate_id, prelude, kind }
}
/// Whether this dependency is to be added to the depending crate's extern prelude.
pub fn is_prelude(&self) -> bool {
self.prelude
}
+
+ pub fn kind(&self) -> DependencyKind {
+ self.kind
+ }
}
impl CrateGraph {
@@ -573,23 +660,46 @@ impl CrateGraph {
pub fn extend(&mut self, mut other: CrateGraph, proc_macros: &mut ProcMacroPaths) {
let topo = other.crates_in_topological_order();
let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
-
for topo in topo {
let crate_data = &mut other.arena[topo];
+
crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
-
- let res = self.arena.iter().find_map(
- |(id, data)| {
- if data == crate_data {
- Some(id)
- } else {
- None
+ let res = self.arena.iter().find_map(|(id, data)| {
+ match (&data.origin, &crate_data.origin) {
+ (a, b) if a == b => {
+ if data.eq_ignoring_origin_and_deps(&crate_data, false) {
+ return Some((id, false));
+ }
+ }
+ (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. })
+ | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => {
+ // If the origins differ, check if the two crates are equal without
+ // considering the dev dependencies, if they are, they most likely are in
+ // different loaded workspaces which may cause issues. We keep the local
+ // version and discard the library one as the local version may have
+ // dev-dependencies that we want to keep resolving. See #15656 for more
+ // information.
+ if data.eq_ignoring_origin_and_deps(&crate_data, true) {
+ return Some((id, if a.is_local() { false } else { true }));
+ }
}
- },
- );
- if let Some(res) = res {
+ (_, _) => return None,
+ }
+
+ None
+ });
+
+ if let Some((res, should_update_lib_to_local)) = res {
id_map.insert(topo, res);
+ if should_update_lib_to_local {
+ assert!(self.arena[res].origin.is_lib());
+ assert!(crate_data.origin.is_local());
+ self.arena[res].origin = crate_data.origin.clone();
+
+ // Move local's dev dependencies into the newly-local-formerly-lib crate.
+ self.arena[res].dependencies = crate_data.dependencies.clone();
+ }
} else {
let id = self.arena.alloc(crate_data.clone());
id_map.insert(topo, id);
@@ -635,9 +745,11 @@ impl CrateGraph {
match (cfg_if, std) {
(Some(cfg_if), Some(std)) => {
self.arena[cfg_if].dependencies.clear();
- self.arena[std]
- .dependencies
- .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if));
+ self.arena[std].dependencies.push(Dependency::new(
+ CrateName::new("cfg_if").unwrap(),
+ cfg_if,
+ DependencyKind::Normal,
+ ));
true
}
_ => false,
@@ -657,6 +769,8 @@ impl ops::Index<CrateId> for CrateGraph {
}
impl CrateData {
+ /// Add a dependency to `self` without checking if the dependency
+ // is existent among `self.dependencies`.
fn add_dep(&mut self, dep: Dependency) {
self.dependencies.push(dep)
}
@@ -758,7 +872,7 @@ impl fmt::Display for CyclicDependenciesError {
#[cfg(test)]
mod tests {
- use crate::CrateOrigin;
+ use crate::{CrateOrigin, DependencyKind};
use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
@@ -766,7 +880,7 @@ mod tests {
fn detect_cyclic_dependency_indirect() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
- FileId(1u32),
+ FileId::from_raw(1u32),
Edition2018,
None,
None,
@@ -779,7 +893,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
- FileId(2u32),
+ FileId::from_raw(2u32),
Edition2018,
None,
None,
@@ -792,7 +906,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
- FileId(3u32),
+ FileId::from_raw(3u32),
Edition2018,
None,
None,
@@ -805,13 +919,22 @@ mod tests {
None,
);
assert!(graph
- .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .add_dep(
+ crate1,
+ Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
+ )
.is_ok());
assert!(graph
- .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
+ .add_dep(
+ crate2,
+ Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal)
+ )
.is_ok());
assert!(graph
- .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1))
+ .add_dep(
+ crate3,
+ Dependency::new(CrateName::new("crate1").unwrap(), crate1, DependencyKind::Normal)
+ )
.is_err());
}
@@ -819,7 +942,7 @@ mod tests {
fn detect_cyclic_dependency_direct() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
- FileId(1u32),
+ FileId::from_raw(1u32),
Edition2018,
None,
None,
@@ -832,7 +955,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
- FileId(2u32),
+ FileId::from_raw(2u32),
Edition2018,
None,
None,
@@ -845,10 +968,16 @@ mod tests {
None,
);
assert!(graph
- .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .add_dep(
+ crate1,
+ Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
+ )
.is_ok());
assert!(graph
- .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .add_dep(
+ crate2,
+ Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
+ )
.is_err());
}
@@ -856,7 +985,7 @@ mod tests {
fn it_works() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
- FileId(1u32),
+ FileId::from_raw(1u32),
Edition2018,
None,
None,
@@ -869,7 +998,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
- FileId(2u32),
+ FileId::from_raw(2u32),
Edition2018,
None,
None,
@@ -882,7 +1011,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
- FileId(3u32),
+ FileId::from_raw(3u32),
Edition2018,
None,
None,
@@ -895,10 +1024,16 @@ mod tests {
None,
);
assert!(graph
- .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
+ .add_dep(
+ crate1,
+ Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal)
+ )
.is_ok());
assert!(graph
- .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
+ .add_dep(
+ crate2,
+ Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal)
+ )
.is_ok());
}
@@ -906,7 +1041,7 @@ mod tests {
fn dashes_are_normalized() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
- FileId(1u32),
+ FileId::from_raw(1u32),
Edition2018,
None,
None,
@@ -919,7 +1054,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
- FileId(2u32),
+ FileId::from_raw(2u32),
Edition2018,
None,
None,
@@ -934,12 +1069,20 @@ mod tests {
assert!(graph
.add_dep(
crate1,
- Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
+ Dependency::new(
+ CrateName::normalize_dashes("crate-name-with-dashes"),
+ crate2,
+ DependencyKind::Normal
+ )
)
.is_ok());
assert_eq!(
graph[crate1].dependencies,
- vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2)]
+ vec![Dependency::new(
+ CrateName::new("crate_name_with_dashes").unwrap(),
+ crate2,
+ DependencyKind::Normal
+ )]
);
}
}
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
index af204e44e..57e793436 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -1,10 +1,11 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod input;
mod change;
pub mod fixture;
+pub mod span;
use std::panic;
@@ -16,9 +17,9 @@ pub use crate::{
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
- Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
- ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
- ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+ DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
+ ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
+ ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
},
};
pub use salsa::{self, Cancelled};
@@ -67,20 +68,19 @@ pub trait FileLoader {
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
- // Parses the file into the syntax tree.
- #[salsa::invoke(parse_query)]
+ /// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
/// The crate graph.
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
- /// The crate graph.
+ /// The proc macros.
#[salsa::input]
fn proc_macros(&self) -> Arc<ProcMacros>;
}
-fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
let text = db.file_text(file_id);
SourceFile::parse(&text)
diff --git a/src/tools/rust-analyzer/crates/base-db/src/span.rs b/src/tools/rust-analyzer/crates/base-db/src/span.rs
new file mode 100644
index 000000000..d8990eb7c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/base-db/src/span.rs
@@ -0,0 +1,208 @@
+//! File and span related types.
+// FIXME: This should probably be moved into its own crate.
+use std::fmt;
+
+use salsa::InternId;
+use tt::SyntaxContext;
+use vfs::FileId;
+
+pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
+
+// The first inde is always the root node's AstId
+pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
+ la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
+
+pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct SyntaxContextId(InternId);
+
+impl fmt::Debug for SyntaxContextId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if *self == Self::SELF_REF {
+ f.debug_tuple("SyntaxContextId")
+ .field(&{
+ #[derive(Debug)]
+ #[allow(non_camel_case_types)]
+ struct SELF_REF;
+ SELF_REF
+ })
+ .finish()
+ } else {
+ f.debug_tuple("SyntaxContextId").field(&self.0).finish()
+ }
+ }
+}
+crate::impl_intern_key!(SyntaxContextId);
+
+impl fmt::Display for SyntaxContextId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.0.as_u32())
+ }
+}
+
+impl SyntaxContext for SyntaxContextId {
+ const DUMMY: Self = Self::ROOT;
+}
+// inherent trait impls please tyvm
+impl SyntaxContextId {
+ pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
+ // veykril(HACK): FIXME salsa doesn't allow us fetching the id of the current input to be allocated so
+ // we need a special value that behaves as the current context.
+ pub const SELF_REF: Self =
+ SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
+
+ pub fn is_root(self) -> bool {
+ self == Self::ROOT
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct SpanAnchor {
+ pub file_id: FileId,
+ pub ast_id: ErasedFileAstId,
+}
+
+impl fmt::Debug for SpanAnchor {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
+ }
+}
+
+impl tt::SpanAnchor for SpanAnchor {
+ const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID };
+}
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust are macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by user), or a `MacroCallId` (source code produced by macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size_of of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// <https://en.wikipedia.org/wiki/String_interning>).
+///
+/// The two variants are encoded in a single u32 which are differentiated by the MSB.
+/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
+/// `MacroCallId`.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct HirFileId(u32);
+
+impl From<HirFileId> for u32 {
+ fn from(value: HirFileId) -> Self {
+ value.0
+ }
+}
+
+impl From<MacroCallId> for HirFileId {
+ fn from(value: MacroCallId) -> Self {
+ value.as_file()
+ }
+}
+
+impl fmt::Debug for HirFileId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.repr().fmt(f)
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroFileId {
+ pub macro_call_id: MacroCallId,
+}
+
+/// `MacroCallId` identifies a particular macro invocation, like
+/// `println!("Hello, {}", world)`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct MacroCallId(salsa::InternId);
+crate::impl_intern_key!(MacroCallId);
+
+impl MacroCallId {
+ pub fn as_file(self) -> HirFileId {
+ MacroFileId { macro_call_id: self }.into()
+ }
+
+ pub fn as_macro_file(self) -> MacroFileId {
+ MacroFileId { macro_call_id: self }
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub enum HirFileIdRepr {
+ FileId(FileId),
+ MacroFile(MacroFileId),
+}
+
+impl fmt::Debug for HirFileIdRepr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.index()).finish(),
+ Self::MacroFile(arg0) => {
+ f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
+ }
+ }
+ }
+}
+
+impl From<FileId> for HirFileId {
+ fn from(id: FileId) -> Self {
+ _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
+ assert!(id.index() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.index());
+ HirFileId(id.index())
+ }
+}
+
+impl From<MacroFileId> for HirFileId {
+ fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
+ _ = Self::ASSERT_MAX_FILE_ID_IS_SAME;
+ let id = id.as_u32();
+ assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {} is too large", id);
+ HirFileId(id | Self::MACRO_FILE_TAG_MASK)
+ }
+}
+
+impl HirFileId {
+ const ASSERT_MAX_FILE_ID_IS_SAME: () =
+ [()][(Self::MAX_HIR_FILE_ID != FileId::MAX_FILE_ID) as usize];
+
+ const MAX_HIR_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
+ const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
+
+ #[inline]
+ pub fn is_macro(self) -> bool {
+ self.0 & Self::MACRO_FILE_TAG_MASK != 0
+ }
+
+ #[inline]
+ pub fn macro_file(self) -> Option<MacroFileId> {
+ match self.0 & Self::MACRO_FILE_TAG_MASK {
+ 0 => None,
+ _ => Some(MacroFileId {
+ macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
+ }),
+ }
+ }
+
+ #[inline]
+ pub fn file_id(self) -> Option<FileId> {
+ match self.0 & Self::MACRO_FILE_TAG_MASK {
+ 0 => Some(FileId::from_raw(self.0)),
+ _ => None,
+ }
+ }
+
+ #[inline]
+ pub fn repr(self) -> HirFileIdRepr {
+ match self.0 & Self::MACRO_FILE_TAG_MASK {
+ 0 => HirFileIdRepr::FileId(FileId::from_raw(self.0)),
+ _ => HirFileIdRepr::MacroFile(MacroFileId {
+ macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
+ }),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
index ed3808972..4324584df 100644
--- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -23,8 +23,8 @@ oorandom = "11.1.3"
# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
# build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
# supports `arbitrary`. This way, we avoid feature unification.
-arbitrary = "1.3.0"
-derive_arbitrary = "1.3.1"
+arbitrary = "1.3.2"
+derive_arbitrary = "1.3.2"
# local deps
mbe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
index 0aeb0b050..6b178e7b0 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
@@ -1,6 +1,6 @@
//! cfg defines conditional compiling options, `cfg` attribute parser and evaluator
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod cfg_expr;
mod dnf;
@@ -58,6 +58,13 @@ impl CfgOptions {
self.enabled.insert(CfgAtom::KeyValue { key, value });
}
+ pub fn difference<'a>(
+ &'a self,
+ other: &'a CfgOptions,
+ ) -> impl Iterator<Item = &'a CfgAtom> + 'a {
+ self.enabled.difference(&other.enabled)
+ }
+
pub fn apply_diff(&mut self, diff: CfgDiff) {
for atom in diff.enable {
self.enabled.insert(atom);
diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
index bdc3f854e..c7ac1af93 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs
@@ -1,37 +1,31 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{ast, AstNode};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}
fn check_dnf(input: &str, expect: Expect) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
}
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -40,11 +34,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
index e7f7adc78..4322d2d96 100644
--- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
@@ -12,9 +12,9 @@ rust-version.workspace = true
doctest = false
[dependencies]
+cargo_metadata.workspace = true
crossbeam-channel = "0.5.8"
-tracing = "0.1.37"
-cargo_metadata = "0.15.4"
+tracing.workspace = true
rustc-hash = "1.1.0"
serde_json.workspace = true
serde.workspace = true
diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
index 2de719af9..68faca51e 100644
--- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs
@@ -2,7 +2,7 @@
//! another compatible command (f.x. clippy) in a background thread and provide
//! LSP diagnostics based on the output of the command.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{
ffi::OsString,
@@ -50,6 +50,7 @@ pub enum FlycheckConfig {
extra_args: Vec<String>,
extra_env: FxHashMap<String, String>,
ansi_color_output: bool,
+ target_dir: Option<PathBuf>,
},
CustomCommand {
command: String,
@@ -308,6 +309,7 @@ impl FlycheckActor {
features,
extra_env,
ansi_color_output,
+ target_dir,
} => {
let mut cmd = Command::new(toolchain::cargo());
cmd.arg(command);
@@ -340,6 +342,9 @@ impl FlycheckActor {
cmd.arg(features.join(" "));
}
}
+ if let Some(target_dir) = target_dir {
+ cmd.arg("--target-dir").arg(target_dir);
+ }
cmd.envs(extra_env);
(cmd, extra_args)
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index 8cf61ee04..2d1745176 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -12,29 +12,24 @@ rust-version.workspace = true
doctest = false
[dependencies]
-anymap = "1.0.0-beta.2"
arrayvec = "0.7.2"
-bitflags = "2.1.0"
+bitflags.workspace = true
cov-mark = "2.0.0-pre.1"
-# We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.4.0", features = ["raw-api"] }
+dashmap.workspace = true
drop_bomb = "0.1.5"
-either = "1.7.0"
+either.workspace = true
fst = { version = "0.4.7", default-features = false }
-indexmap = "2.0.0"
-itertools = "0.10.5"
+indexmap.workspace = true
+itertools.workspace = true
la-arena.workspace = true
once_cell = "1.17.0"
rustc-hash = "1.1.0"
-tracing = "0.1.35"
+tracing.workspace = true
smallvec.workspace = true
hashbrown.workspace = true
triomphe.workspace = true
-rustc_abi.workspace = true
-rustc_index.workspace = true
-rustc_parse_format.workspace = true
-
+rustc-dependencies.workspace = true
# local deps
stdx.workspace = true
@@ -48,8 +43,12 @@ cfg.workspace = true
tt.workspace = true
limit.workspace = true
+
[dev-dependencies]
expect-test = "1.4.0"
# local deps
test-utils.workspace = true
+
+[features]
+in-rust-tree = ["rustc-dependencies/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
index c6454eb9e..942b28fc1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -215,6 +215,10 @@ impl Attrs {
self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
}
+ pub fn export_name(&self) -> Option<&SmolStr> {
+ self.by_key("export_name").string_value()
+ }
+
pub fn is_proc_macro(&self) -> bool {
self.by_key("proc_macro").exists()
}
@@ -417,6 +421,7 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
+ db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::TypeParamId(it) => {
@@ -424,11 +429,16 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
+ db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::LifetimeParamId(it) => {
let src = it.parent.child_source(db);
- RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id]))
+ RawAttrs::from_attrs_owner(
+ db.upcast(),
+ src.with_value(&src.value[it.local_id]),
+ db.span_map(src.file_id).as_ref(),
+ )
}
},
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs
index 2ae3cd2a9..48a596f7f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr/builtin.rs
@@ -2,7 +2,7 @@
//!
//! The actual definitions were copied from rustc's `compiler/rustc_feature/src/builtin_attrs.rs`.
//!
-//! It was last synchronized with upstream commit e29821ff85a2a3000d226f99f62f89464028d5d6.
+//! It was last synchronized with upstream commit c3def263a44e07e09ae6d57abfc8650227fb4972.
//!
//! The macros were adjusted to only expand to the attribute name, since that is all we need to do
//! name resolution, and `BUILTIN_ATTRIBUTES` is almost entirely unchanged from the original, to
@@ -240,7 +240,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
template!(List: "address, kcfi, memory, thread"), DuplicatesOk,
experimental!(no_sanitize)
),
- gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, experimental!(coverage)),
+ gated!(coverage, Normal, template!(Word, List: "on|off"), WarnFollowing, coverage_attribute, experimental!(coverage)),
ungated!(
doc, Normal, template!(List: "hidden|inline|...", NameValueStr: "string"), DuplicatesOk
@@ -364,7 +364,6 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
allow_internal_unsafe, Normal, template!(Word), WarnFollowing,
"allow_internal_unsafe side-steps the unsafe_code lint",
),
- ungated!(rustc_safe_intrinsic, Normal, template!(Word), DuplicatesOk),
rustc_attr!(rustc_allowed_through_unstable_modules, Normal, template!(Word), WarnFollowing,
"rustc_allowed_through_unstable_modules special cases accidental stabilizations of stable items \
through unstable paths"),
@@ -453,6 +452,12 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
ErrorFollowing,
INTERNAL_UNSTABLE
),
+ rustc_attr!(
+ rustc_confusables, Normal,
+ template!(List: r#""name1", "name2", ..."#),
+ ErrorFollowing,
+ INTERNAL_UNSTABLE,
+ ),
// Enumerates "identity-like" conversion methods to suggest on type mismatch.
rustc_attr!(
rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
@@ -488,6 +493,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(
rustc_do_not_const_check, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
),
+ // Ensure the argument to this function is &&str during const-check.
+ rustc_attr!(
+ rustc_const_panic_str, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE
+ ),
// ==========================================================================
// Internal attributes, Layout related:
@@ -521,6 +530,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
"#[rustc_pass_by_value] is used to mark types that must be passed by value instead of reference."
),
rustc_attr!(
+ rustc_never_returns_null_ptr, Normal, template!(Word), ErrorFollowing,
+ "#[rustc_never_returns_null_ptr] is used to mark functions returning non-null pointers."
+ ),
+ rustc_attr!(
rustc_coherence_is_core, AttributeType::CrateLevel, template!(Word), ErrorFollowing, @only_local: true,
"#![rustc_coherence_is_core] allows inherent methods on builtin types, only intended to be used in `core`."
),
@@ -533,7 +546,11 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
"#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
),
rustc_attr!(
- rustc_deny_explicit_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: false,
+ rustc_deny_explicit_impl,
+ AttributeType::Normal,
+ template!(List: "implement_via_object = (true|false)"),
+ ErrorFollowing,
+ @only_local: true,
"#[rustc_deny_explicit_impl] enforces that a trait can have no user-provided impls"
),
rustc_attr!(
@@ -614,6 +631,10 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_doc_primitive, Normal, template!(NameValueStr: "primitive name"), ErrorFollowing,
r#"`rustc_doc_primitive` is a rustc internal attribute"#,
),
+ rustc_attr!(
+ rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing,
+ "the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe"
+ ),
// ==========================================================================
// Internal attributes, Testing:
@@ -625,13 +646,16 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(TEST, rustc_insignificant_dtor, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_strict_coherence, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_variance, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_variance_of_opaques, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_hidden_type_of_opaques, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
+ rustc_attr!(TEST, rustc_abi, Normal, template!(List: "field1, field2, ..."), WarnFollowing),
rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing),
rustc_attr!(
TEST, rustc_error, Normal,
- template!(Word, List: "delay_span_bug_from_inside_query"), WarnFollowingWordOnly
+ template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly
),
- rustc_attr!(TEST, rustc_dump_user_substs, Normal, template!(Word), WarnFollowing),
+ rustc_attr!(TEST, rustc_dump_user_args, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing),
rustc_attr!(
TEST, rustc_if_this_changed, Normal, template!(Word, List: "DepNode"), DuplicatesOk
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs
index e4c8d446a..0f98a4ec9 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr/tests.rs
@@ -1,17 +1,20 @@
//! This module contains tests for doc-expression parsing.
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
+use base_db::FileId;
+use hir_expand::span::{RealSpanMap, SpanMapRef};
use mbe::syntax_node_to_token_tree;
use syntax::{ast, AstNode};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
- let (tt, _) = {
- let source_file = ast::SourceFile::parse(input).ok().unwrap();
- let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- syntax_node_to_token_tree(tt.syntax())
- };
+ let source_file = ast::SourceFile::parse(input).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let tt = syntax_node_to_token_tree(
+ tt.syntax(),
+ SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))),
+ );
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
index c0baf6011..db28c6731 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
@@ -57,7 +57,7 @@ pub struct Body {
pub type ExprPtr = AstPtr<ast::Expr>;
pub type ExprSource = InFile<ExprPtr>;
-pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
+pub type PatPtr = AstPtr<Either<ast::Pat, ast::SelfParam>>;
pub type PatSource = InFile<PatPtr>;
pub type LabelPtr = AstPtr<ast::Label>;
@@ -95,6 +95,8 @@ pub struct BodySourceMap {
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
+ format_args_template_map: FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,
+
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
/// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
@@ -356,12 +358,12 @@ impl BodySourceMap {
}
pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
- let src = node.map(|it| Either::Left(AstPtr::new(it)));
+ let src = node.map(|it| AstPtr::new(it).wrap_left());
self.pat_map.get(&src).cloned()
}
pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option<PatId> {
- let src = node.map(|it| Either::Right(AstPtr::new(it)));
+ let src = node.map(|it| AstPtr::new(it).wrap_right());
self.pat_map.get(&src).cloned()
}
@@ -387,6 +389,14 @@ impl BodySourceMap {
self.expr_map.get(&src).copied()
}
+ pub fn implicit_format_args(
+ &self,
+ node: InFile<&ast::FormatArgsExpr>,
+ ) -> Option<&[(syntax::TextRange, Name)]> {
+ let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
+ self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref)
+ }
+
/// Get a reference to the body source map's diagnostics.
pub fn diagnostics(&self) -> &[BodyDiagnostic] {
&self.diagnostics
@@ -403,8 +413,10 @@ impl BodySourceMap {
field_map_back,
pat_field_map_back,
expansions,
+ format_args_template_map,
diagnostics,
} = self;
+ format_args_template_map.shrink_to_fit();
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
index cc02df80a..c6a909320 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -196,16 +196,12 @@ impl ExprCollector<'_> {
if let Some(self_param) =
param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
{
- let ptr = AstPtr::new(&self_param);
- let binding_id: la_arena::Idx<Binding> = self.alloc_binding(
- name![self],
- BindingAnnotation::new(
- self_param.mut_token().is_some() && self_param.amp_token().is_none(),
- false,
- ),
- );
- let param_pat =
- self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, Either::Right(ptr));
+ let is_mutable =
+ self_param.mut_token().is_some() && self_param.amp_token().is_none();
+ let ptr = AstPtr::new(&Either::Right(self_param));
+ let binding_id: la_arena::Idx<Binding> =
+ self.alloc_binding(name![self], BindingAnnotation::new(is_mutable, false));
+ let param_pat = self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, ptr);
self.add_definition_to_binding(binding_id, param_pat);
self.body.params.push(param_pat);
}
@@ -1029,7 +1025,7 @@ impl ExprCollector<'_> {
let id = collector(self, Some(expansion.tree()));
self.ast_id_map = prev_ast_id_map;
- self.expander.exit(self.db, mark);
+ self.expander.exit(mark);
id
}
None => collector(self, None),
@@ -1260,8 +1256,8 @@ impl ExprCollector<'_> {
(Some(id), Pat::Bind { id, subpat })
};
- let ptr = AstPtr::new(&pat);
- let pat = self.alloc_pat(pattern, Either::Left(ptr));
+ let ptr = AstPtr::new(&Either::Left(pat));
+ let pat = self.alloc_pat(pattern, ptr);
if let Some(binding_id) = binding {
self.add_definition_to_binding(binding_id, pat);
}
@@ -1395,7 +1391,7 @@ impl ExprCollector<'_> {
ast::Pat::MacroPat(mac) => match mac.macro_call() {
Some(call) => {
let macro_ptr = AstPtr::new(&call);
- let src = self.expander.to_source(Either::Left(AstPtr::new(&pat)));
+ let src = self.expander.to_source(AstPtr::new(&Either::Left(pat)));
let pat =
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list)
@@ -1430,8 +1426,8 @@ impl ExprCollector<'_> {
Pat::Range { start, end }
}
};
- let ptr = AstPtr::new(&pat);
- self.alloc_pat(pattern, Either::Left(ptr))
+ let ptr = AstPtr::new(&Either::Left(pat));
+ self.alloc_pat(pattern, ptr)
}
fn collect_pat_opt(&mut self, pat: Option<ast::Pat>, binding_list: &mut BindingList) -> PatId {
@@ -1601,13 +1597,25 @@ impl ExprCollector<'_> {
});
let template = f.template();
let fmt_snippet = template.as_ref().map(ToString::to_string);
+ let mut mappings = vec![];
let fmt = match template.and_then(|it| self.expand_macros_to_string(it)) {
- Some((s, is_direct_literal)) => {
- format_args::parse(&s, fmt_snippet, args, is_direct_literal, |name| {
- self.alloc_expr_desugared(Expr::Path(Path::from(name)))
- })
- }
- None => FormatArgs { template: Default::default(), arguments: args.finish() },
+ Some((s, is_direct_literal)) => format_args::parse(
+ &s,
+ fmt_snippet,
+ args,
+ is_direct_literal,
+ |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
+ |name, span| {
+ if let Some(span) = span {
+ mappings.push((span, name.clone()))
+ }
+ },
+ ),
+ None => FormatArgs {
+ template: Default::default(),
+ arguments: args.finish(),
+ orphans: Default::default(),
+ },
};
// Create a list of all _unique_ (argument, format trait) combinations.
@@ -1746,18 +1754,26 @@ impl ExprCollector<'_> {
});
let unsafe_arg_new = self.alloc_expr_desugared(Expr::Unsafe {
id: None,
- statements: Box::default(),
+ // We collect the unused expressions here so that we still infer them instead of
+ // dropping them out of the expression tree
+ statements: fmt
+ .orphans
+ .into_iter()
+ .map(|expr| Statement::Expr { expr, has_semi: true })
+ .collect(),
tail: Some(unsafe_arg_new),
});
- self.alloc_expr(
+ let idx = self.alloc_expr(
Expr::Call {
callee: new_v1_formatted,
args: Box::new([lit_pieces, args, format_options, unsafe_arg_new]),
is_assignee_expr: false,
},
syntax_ptr,
- )
+ );
+ self.source_map.format_args_template_map.insert(idx, mappings);
+ idx
}
/// Generate a hir expression for a format_args placeholder specification.
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
index fad4d7a4d..6ecf1c20d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
@@ -54,7 +54,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
if let DefWithBodyId::FunctionId(it) = owner {
p.buf.push('(');
- body.params.iter().zip(&db.function_data(it).params).for_each(|(&param, ty)| {
+ body.params.iter().zip(db.function_data(it).params.iter()).for_each(|(&param, ty)| {
p.print_pat(param);
p.buf.push(':');
p.print_type_ref(ty);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
index 2a90a09f2..baca293e2 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
@@ -1,7 +1,6 @@
//! Name resolution for expressions.
use hir_expand::name::Name;
-use la_arena::{Arena, Idx, IdxRange, RawIdx};
-use rustc_hash::FxHashMap;
+use la_arena::{Arena, ArenaMap, Idx, IdxRange, RawIdx};
use triomphe::Arc;
use crate::{
@@ -17,7 +16,7 @@ pub type ScopeId = Idx<ScopeData>;
pub struct ExprScopes {
scopes: Arena<ScopeData>,
scope_entries: Arena<ScopeEntry>,
- scope_by_expr: FxHashMap<ExprId, ScopeId>,
+ scope_by_expr: ArenaMap<ExprId, ScopeId>,
}
#[derive(Debug, PartialEq, Eq)]
@@ -77,10 +76,10 @@ impl ExprScopes {
}
pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
- self.scope_by_expr.get(&expr).copied()
+ self.scope_by_expr.get(expr).copied()
}
- pub fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {
+ pub fn scope_by_expr(&self) -> &ArenaMap<ExprId, ScopeId> {
&self.scope_by_expr
}
}
@@ -94,7 +93,7 @@ impl ExprScopes {
let mut scopes = ExprScopes {
scopes: Arena::default(),
scope_entries: Arena::default(),
- scope_by_expr: FxHashMap::default(),
+ scope_by_expr: ArenaMap::with_capacity(body.exprs.len()),
};
let mut root = scopes.root_scope();
scopes.add_params_bindings(body, root, &body.params);
@@ -476,10 +475,7 @@ fn foo() {
.pat_syntax(*body.bindings[resolved.binding()].definitions.first().unwrap())
.unwrap();
- let local_name = pat_src.value.either(
- |it| it.syntax_node_ptr().to_node(file.syntax()),
- |it| it.syntax_node_ptr().to_node(file.syntax()),
- );
+ let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
assert_eq!(local_name.text_range(), expected_name.syntax().text_range());
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
index 1658757d2..2b432dfbb 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
@@ -143,7 +143,6 @@ mod m {
#[test]
fn desugar_builtin_format_args() {
- // Regression test for a path resolution bug introduced with inner item handling.
let (db, body, def) = lower(
r#"
//- minicore: fmt
@@ -161,7 +160,7 @@ fn main() {
let count = 10;
builtin#lang(Arguments::new_v1_formatted)(
&[
- "\"hello ", " ", " friends, we ", " ", "", "\"",
+ "hello ", " ", " friends, we ", " ", "",
],
&[
builtin#lang(Argument::new_display)(
@@ -221,3 +220,115 @@ fn main() {
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}
+
+#[test]
+fn test_macro_hygiene() {
+ let (db, body, def) = lower(
+ r##"
+//- minicore: fmt, from
+//- /main.rs
+mod error;
+
+use crate::error::error;
+
+fn main() {
+ // _ = forces body expansion instead of block def map expansion
+ _ = error!("Failed to resolve path `{}`", node.text());
+}
+//- /error.rs
+macro_rules! _error {
+ ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
+}
+pub(crate) use _error as error;
+macro_rules! _intermediate {
+ ($arg:expr) => {$crate::error::SsrError::new($arg)}
+}
+pub(crate) use _intermediate as intermediate;
+
+pub struct SsrError(pub(crate) core::fmt::Arguments);
+
+impl SsrError {
+ pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
+ SsrError(message.into())
+ }
+}
+"##,
+ );
+
+ assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
+ expect![[r#"
+ fn main() {
+ _ = $crate::error::SsrError::new(
+ builtin#lang(Arguments::new_v1_formatted)(
+ &[
+ "Failed to resolve path `", "`",
+ ],
+ &[
+ builtin#lang(Argument::new_display)(
+ &node.text(),
+ ),
+ ],
+ &[
+ builtin#lang(Placeholder::new)(
+ 0usize,
+ ' ',
+ builtin#lang(Alignment::Unknown),
+ 0u32,
+ builtin#lang(Count::Implied),
+ builtin#lang(Count::Implied),
+ ),
+ ],
+ unsafe {
+ builtin#lang(UnsafeArg::new)()
+ },
+ ),
+ );
+ }"#]]
+ .assert_eq(&body.pretty_print(&db, def))
+}
+
+#[test]
+fn regression_10300() {
+ let (db, body, def) = lower(
+ r#"
+//- minicore: concat, panic
+mod private {
+ pub use core::concat;
+}
+
+macro_rules! m {
+ () => {
+ panic!(concat!($crate::private::concat!("cc")));
+ };
+}
+
+fn f() {
+ m!();
+}
+"#,
+ );
+
+ let (_, source_map) = db.body_with_source_map(def.into());
+ assert_eq!(source_map.diagnostics(), &[]);
+
+ for (_, def_map) in body.blocks(&db) {
+ assert_eq!(def_map.diagnostics(), &[]);
+ }
+
+ expect![[r#"
+ fn f() {
+ $crate::panicking::panic_fmt(
+ builtin#lang(Arguments::new_v1_formatted)(
+ &[
+ "cc",
+ ],
+ &[],
+ &[],
+ unsafe {
+ builtin#lang(UnsafeArg::new)()
+ },
+ ),
+ );
+ }"#]]
+ .assert_eq(&body.pretty_print(&db, def))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
index 4cfd318a4..c82d2347d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
@@ -5,8 +5,7 @@
//! node for a *child*, and get its hir.
use either::Either;
-use hir_expand::HirFileId;
-use syntax::ast::HasDocComments;
+use hir_expand::{attrs::collect_attrs, HirFileId};
use crate::{
db::DefDatabase,
@@ -118,8 +117,8 @@ impl ChildBySource for ItemScope {
|(ast_id, calls)| {
let adt = ast_id.to_node(db.upcast());
calls.for_each(|(attr_id, call_id, calls)| {
- if let Some(Either::Left(attr)) =
- adt.doc_comments_and_attrs().nth(attr_id.ast_index())
+ if let Some((_, Either::Left(attr))) =
+ collect_attrs(&adt).nth(attr_id.ast_index())
{
res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
index 68defa385..635d13f24 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
@@ -15,9 +15,7 @@ use crate::{
attr::Attrs,
db::DefDatabase,
expander::{Expander, Mark},
- item_tree::{
- self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, Param, TreeId,
- },
+ item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
macro_call_as_call_id, macro_id_to_def_id,
nameres::{
attr_resolution::ResolvedAttr,
@@ -36,7 +34,7 @@ use crate::{
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FunctionData {
pub name: Name,
- pub params: Vec<Interned<TypeRef>>,
+ pub params: Box<[Interned<TypeRef>]>,
pub ret_type: Interned<TypeRef>,
pub attrs: Attrs,
pub visibility: RawVisibility,
@@ -69,7 +67,7 @@ impl FunctionData {
let is_varargs = enabled_params
.clone()
.next_back()
- .map_or(false, |param| matches!(item_tree[param], Param::Varargs));
+ .map_or(false, |param| item_tree[param].type_ref.is_none());
let mut flags = func.flags;
if is_varargs {
@@ -105,10 +103,7 @@ impl FunctionData {
name: func.name.clone(),
params: enabled_params
.clone()
- .filter_map(|id| match &item_tree[id] {
- Param::Normal(ty) => Some(ty.clone()),
- Param::Varargs => None,
- })
+ .filter_map(|id| item_tree[id].type_ref.clone())
.collect(),
ret_type: func.ret_type.clone(),
attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()),
@@ -182,7 +177,7 @@ pub struct TypeAliasData {
pub rustc_has_incoherent_inherent_impls: bool,
pub rustc_allow_incoherent_impl: bool,
/// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
- pub bounds: Vec<Interned<TypeBound>>,
+ pub bounds: Box<[Interned<TypeBound>]>,
}
impl TypeAliasData {
@@ -215,7 +210,7 @@ impl TypeAliasData {
is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
rustc_has_incoherent_inherent_impls,
rustc_allow_incoherent_impl,
- bounds: typ.bounds.to_vec(),
+ bounds: typ.bounds.clone(),
})
}
}
@@ -332,6 +327,7 @@ pub struct ImplData {
pub self_ty: Interned<TypeRef>,
pub items: Vec<AssocItemId>,
pub is_negative: bool,
+ pub is_unsafe: bool,
// box it as the vec is usually empty anyways
pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
}
@@ -353,6 +349,7 @@ impl ImplData {
let target_trait = impl_def.target_trait.clone();
let self_ty = impl_def.self_ty.clone();
let is_negative = impl_def.is_negative;
+ let is_unsafe = impl_def.is_unsafe;
let mut collector =
AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
@@ -362,7 +359,14 @@ impl ImplData {
let items = items.into_iter().map(|(_, item)| item).collect();
(
- Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }),
+ Arc::new(ImplData {
+ target_trait,
+ self_ty,
+ items,
+ is_negative,
+ is_unsafe,
+ attribute_calls,
+ }),
diagnostics.into(),
)
}
@@ -659,7 +663,7 @@ impl<'a> AssocItemCollector<'a> {
self.module_id.local_id,
MacroCallKind::Attr {
ast_id,
- attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
+ attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@@ -702,7 +706,7 @@ impl<'a> AssocItemCollector<'a> {
}
AssocItem::MacroCall(call) => {
let file_id = self.expander.current_file_id();
- let MacroCall { ast_id, expand_to, ref path } = item_tree[call];
+ let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
let module = self.expander.module.local_id;
let resolver = |path| {
@@ -721,6 +725,7 @@ impl<'a> AssocItemCollector<'a> {
match macro_call_as_call_id(
self.db.upcast(),
&AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
+ call_site,
expand_to,
self.expander.module.krate(),
resolver,
@@ -789,7 +794,7 @@ impl<'a> AssocItemCollector<'a> {
self.collect(&item_tree, tree_id, &iter);
- self.expander.exit(self.db, mark);
+ self.expander.exit(mark);
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
index 224f7328f..b163112db 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
@@ -11,7 +11,7 @@ use hir_expand::{
};
use intern::Interned;
use la_arena::{Arena, ArenaMap};
-use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
+use rustc_dependencies::abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
use syntax::ast::{self, HasName, HasVisibility};
use triomphe::Arc;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
index 63138aa6a..a59bbf7e2 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map.rs
@@ -29,8 +29,8 @@ use std::{
ops::{Index, IndexMut},
};
-use anymap::Map;
use rustc_hash::FxHashMap;
+use stdx::anymap::Map;
pub struct Key<K, V, P = (K, V)> {
_phantom: PhantomData<(K, V, P)>,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
index 6db8398bc..398f116d8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
@@ -4,21 +4,21 @@ use base_db::CrateId;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::{
- attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId,
- InFile, MacroCallId, UnresolvedMacro,
+ attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId,
+ InFile, MacroCallId,
};
use limit::Limit;
use syntax::{ast, Parse, SyntaxNode};
use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall,
- MacroId, ModuleId,
+ MacroId, ModuleId, UnresolvedMacro,
};
#[derive(Debug)]
pub struct Expander {
cfg_options: CfgOptions,
- hygiene: Hygiene,
+ span_map: SpanMap,
krate: CrateId,
pub(crate) current_file_id: HirFileId,
pub(crate) module: ModuleId,
@@ -41,7 +41,7 @@ impl Expander {
recursion_depth: 0,
recursion_limit,
cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
- hygiene: Hygiene::new(db.upcast(), current_file_id),
+ span_map: db.span_map(current_file_id),
krate: module.krate,
}
}
@@ -94,8 +94,8 @@ impl Expander {
ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
}
- pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
- self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+ pub fn exit(&mut self, mut mark: Mark) {
+ self.span_map = mark.span_map;
self.current_file_id = mark.file_id;
if self.recursion_depth == u32::MAX {
// Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@@ -110,7 +110,7 @@ impl Expander {
}
pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
- LowerCtx::new(db, &self.hygiene, self.current_file_id)
+ LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
}
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@@ -118,7 +118,7 @@ impl Expander {
}
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
- Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
+ Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref()))
}
pub(crate) fn cfg_options(&self) -> &CfgOptions {
@@ -130,8 +130,8 @@ impl Expander {
}
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
- let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
- Path::from_src(path, &ctx)
+ let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id);
+ Path::from_src(&ctx, path)
}
fn within_limit<F, T: ast::AstNode>(
@@ -174,10 +174,11 @@ impl Expander {
let parse = value.cast::<T>()?;
self.recursion_depth += 1;
- self.hygiene = Hygiene::new(db.upcast(), file_id);
+ let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id));
let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
let mark = Mark {
file_id: old_file_id,
+ span_map: old_span_map,
bomb: DropBomb::new("expansion mark dropped"),
};
Some((mark, parse))
@@ -190,5 +191,6 @@ impl Expander {
#[derive(Debug)]
pub struct Mark {
file_id: HirFileId,
+ span_map: SpanMap,
bomb: DropBomb,
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
index b9c5ff727..13af0b021 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
@@ -21,9 +21,10 @@ pub fn find_path(
item: ItemInNs,
from: ModuleId,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
let _p = profile::span("find_path");
- find_path_inner(db, item, from, None, prefer_no_std)
+ find_path_inner(db, item, from, None, prefer_no_std, prefer_prelude)
}
pub fn find_path_prefixed(
@@ -32,9 +33,10 @@ pub fn find_path_prefixed(
from: ModuleId,
prefix_kind: PrefixKind,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
let _p = profile::span("find_path_prefixed");
- find_path_inner(db, item, from, Some(prefix_kind), prefer_no_std)
+ find_path_inner(db, item, from, Some(prefix_kind), prefer_no_std, prefer_prelude)
}
#[derive(Copy, Clone, Debug)]
@@ -88,6 +90,7 @@ fn find_path_inner(
from: ModuleId,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
// - if the item is a builtin, it's in scope
if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
@@ -109,6 +112,7 @@ fn find_path_inner(
MAX_PATH_LEN,
prefixed,
prefer_no_std || db.crate_supports_no_std(crate_root.krate),
+ prefer_prelude,
)
.map(|(item, _)| item);
}
@@ -134,6 +138,7 @@ fn find_path_inner(
from,
prefixed,
prefer_no_std,
+ prefer_prelude,
) {
let data = db.enum_data(variant.parent);
path.push_segment(data.variants[variant.local_id].name.clone());
@@ -156,6 +161,7 @@ fn find_path_inner(
from,
prefixed,
prefer_no_std || db.crate_supports_no_std(crate_root.krate),
+ prefer_prelude,
scope_name,
)
.map(|(item, _)| item)
@@ -171,6 +177,7 @@ fn find_path_for_module(
max_len: usize,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<(ModPath, Stability)> {
if max_len == 0 {
return None;
@@ -236,6 +243,7 @@ fn find_path_for_module(
from,
prefixed,
prefer_no_std,
+ prefer_prelude,
scope_name,
)
}
@@ -316,6 +324,7 @@ fn calculate_best_path(
from: ModuleId,
mut prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
scope_name: Option<Name>,
) -> Option<(ModPath, Stability)> {
if max_len <= 1 {
@@ -351,11 +360,14 @@ fn calculate_best_path(
best_path_len - 1,
prefixed,
prefer_no_std,
+ prefer_prelude,
) {
path.0.push_segment(name);
let new_path = match best_path.take() {
- Some(best_path) => select_best_path(best_path, path, prefer_no_std),
+ Some(best_path) => {
+ select_best_path(best_path, path, prefer_no_std, prefer_prelude)
+ }
None => path,
};
best_path_len = new_path.0.len();
@@ -367,18 +379,18 @@ fn calculate_best_path(
// too (unless we can't name it at all). It could *also* be (re)exported by the same crate
// that wants to import it here, but we always prefer to use the external path here.
- let crate_graph = db.crate_graph();
- let extern_paths = crate_graph[from.krate].dependencies.iter().filter_map(|dep| {
+ for dep in &db.crate_graph()[from.krate].dependencies {
let import_map = db.import_map(dep.crate_id);
- import_map.import_info_for(item).and_then(|info| {
+ let Some(import_info_for) = import_map.import_info_for(item) else { continue };
+ for info in import_info_for {
if info.is_doc_hidden {
// the item or import is `#[doc(hidden)]`, so skip it as it is in an external crate
- return None;
+ continue;
}
// Determine best path for containing module and append last segment from `info`.
// FIXME: we should guide this to look up the path locally, or from the same crate again?
- let (mut path, path_stability) = find_path_for_module(
+ let Some((mut path, path_stability)) = find_path_for_module(
db,
def_map,
visited_modules,
@@ -388,22 +400,26 @@ fn calculate_best_path(
max_len - 1,
prefixed,
prefer_no_std,
- )?;
+ prefer_prelude,
+ ) else {
+ continue;
+ };
cov_mark::hit!(partially_imported);
path.push_segment(info.name.clone());
- Some((
+
+ let path_with_stab = (
path,
zip_stability(path_stability, if info.is_unstable { Unstable } else { Stable }),
- ))
- })
- });
+ );
- for path in extern_paths {
- let new_path = match best_path.take() {
- Some(best_path) => select_best_path(best_path, path, prefer_no_std),
- None => path,
- };
- update_best_path(&mut best_path, new_path);
+ let new_path_with_stab = match best_path.take() {
+ Some(best_path) => {
+ select_best_path(best_path, path_with_stab, prefer_no_std, prefer_prelude)
+ }
+ None => path_with_stab,
+ };
+ update_best_path(&mut best_path, new_path_with_stab);
+ }
}
}
if let Some(module) = item.module(db) {
@@ -420,17 +436,39 @@ fn calculate_best_path(
}
}
+/// Select the best (most relevant) path between two paths.
+/// This accounts for stability, path length, whether std should be chosen over alloc/core paths,
+/// as well as whether prelude-like paths should be preferred or ignored.
fn select_best_path(
- old_path: (ModPath, Stability),
- new_path: (ModPath, Stability),
+ old_path @ (_, old_stability): (ModPath, Stability),
+ new_path @ (_, new_stability): (ModPath, Stability),
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> (ModPath, Stability) {
- match (old_path.1, new_path.1) {
+ match (old_stability, new_stability) {
(Stable, Unstable) => return old_path,
(Unstable, Stable) => return new_path,
_ => {}
}
const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc];
+
+ let choose = |new_path: (ModPath, _), old_path: (ModPath, _)| {
+ let new_has_prelude = new_path.0.segments().iter().any(|seg| seg == &known::prelude);
+ let old_has_prelude = old_path.0.segments().iter().any(|seg| seg == &known::prelude);
+ match (new_has_prelude, old_has_prelude, prefer_prelude) {
+ (true, false, true) | (false, true, false) => new_path,
+ (true, false, false) | (false, true, true) => old_path,
+ // no prelude difference in the paths, so pick the smaller one
+ (true, true, _) | (false, false, _) => {
+ if new_path.0.len() < old_path.0.len() {
+ new_path
+ } else {
+ old_path
+ }
+ }
+ }
+ };
+
match (old_path.0.segments().first(), new_path.0.segments().first()) {
(Some(old), Some(new)) if STD_CRATES.contains(old) && STD_CRATES.contains(new) => {
let rank = match prefer_no_std {
@@ -451,23 +489,11 @@ fn select_best_path(
let orank = rank(old);
match nrank.cmp(&orank) {
Ordering::Less => old_path,
- Ordering::Equal => {
- if new_path.0.len() < old_path.0.len() {
- new_path
- } else {
- old_path
- }
- }
+ Ordering::Equal => choose(new_path, old_path),
Ordering::Greater => new_path,
}
}
- _ => {
- if new_path.0.len() < old_path.0.len() {
- new_path
- } else {
- old_path
- }
- }
+ _ => choose(new_path, old_path),
}
}
@@ -560,7 +586,7 @@ fn find_local_import_locations(
#[cfg(test)]
mod tests {
use base_db::fixture::WithFixture;
- use hir_expand::hygiene::Hygiene;
+ use hir_expand::db::ExpandDatabase;
use syntax::ast::AstNode;
use crate::test_db::TestDB;
@@ -570,13 +596,20 @@ mod tests {
/// `code` needs to contain a cursor marker; checks that `find_path` for the
/// item the `path` refers to returns that same path when called from the
/// module the cursor is in.
- fn check_found_path_(ra_fixture: &str, path: &str, prefix_kind: Option<PrefixKind>) {
+ #[track_caller]
+ fn check_found_path_(
+ ra_fixture: &str,
+ path: &str,
+ prefix_kind: Option<PrefixKind>,
+ prefer_prelude: bool,
+ ) {
let (db, pos) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(pos);
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
- let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
+ let mod_path =
+ ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap();
let def_map = module.def_map(&db);
let resolved = def_map
@@ -589,11 +622,17 @@ mod tests {
)
.0
.take_types()
- .unwrap();
-
- let found_path =
- find_path_inner(&db, ItemInNs::Types(resolved), module, prefix_kind, false);
- assert_eq!(found_path, Some(mod_path), "{prefix_kind:?}");
+ .expect("path does not resolve to a type");
+
+ let found_path = find_path_inner(
+ &db,
+ ItemInNs::Types(resolved),
+ module,
+ prefix_kind,
+ false,
+ prefer_prelude,
+ );
+ assert_eq!(found_path, Some(mod_path), "on kind: {prefix_kind:?}");
}
fn check_found_path(
@@ -603,10 +642,23 @@ mod tests {
absolute: &str,
self_prefixed: &str,
) {
- check_found_path_(ra_fixture, unprefixed, None);
- check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain));
- check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate));
- check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf));
+ check_found_path_(ra_fixture, unprefixed, None, false);
+ check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain), false);
+ check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate), false);
+ check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf), false);
+ }
+
+ fn check_found_path_prelude(
+ ra_fixture: &str,
+ unprefixed: &str,
+ prefixed: &str,
+ absolute: &str,
+ self_prefixed: &str,
+ ) {
+ check_found_path_(ra_fixture, unprefixed, None, true);
+ check_found_path_(ra_fixture, prefixed, Some(PrefixKind::Plain), true);
+ check_found_path_(ra_fixture, absolute, Some(PrefixKind::ByCrate), true);
+ check_found_path_(ra_fixture, self_prefixed, Some(PrefixKind::BySelf), true);
}
#[test]
@@ -1421,4 +1473,34 @@ pub mod error {
"std::error::Error",
);
}
+
+ #[test]
+ fn respects_prelude_setting() {
+ let ra_fixture = r#"
+//- /main.rs crate:main deps:krate
+$0
+//- /krate.rs crate:krate
+pub mod prelude {
+ pub use crate::foo::*;
+}
+
+pub mod foo {
+ pub struct Foo;
+}
+"#;
+ check_found_path(
+ ra_fixture,
+ "krate::foo::Foo",
+ "krate::foo::Foo",
+ "krate::foo::Foo",
+ "krate::foo::Foo",
+ );
+ check_found_path_prelude(
+ ra_fixture,
+ "krate::prelude::Foo",
+ "krate::prelude::Foo",
+ "krate::prelude::Foo",
+ "krate::prelude::Foo",
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
index 1e2535a8a..f5324f052 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -21,7 +21,7 @@ use crate::{
db::DefDatabase,
dyn_map::{keys, DynMap},
expander::Expander,
- item_tree::{AttrOwner, ItemTree},
+ item_tree::ItemTree,
lower::LowerCtx,
nameres::{DefMap, MacroSubNs},
src::{HasChildSource, HasSource},
@@ -222,12 +222,11 @@ impl GenericParams {
let module = loc.container.module(db);
let func_data = db.function_data(id);
- // Don't create an `Expander` nor call `loc.source(db)` if not needed since this
- // causes a reparse after the `ItemTree` has been created.
- let mut expander = Lazy::new(|| {
- (module.def_map(db), Expander::new(db, loc.source(db).file_id, module))
- });
- for param in &func_data.params {
+ // Don't create an `Expander` if not needed since this
+ // could cause a reparse after the `ItemTree` has been created due to the spanmap.
+ let mut expander =
+ Lazy::new(|| (module.def_map(db), Expander::new(db, loc.id.file_id(), module)));
+ for param in func_data.params.iter() {
generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
}
@@ -250,7 +249,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
node: &dyn HasGenericParams,
- add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+ add_param_attrs: impl FnMut(
+ Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+ ast::GenericParam,
+ ),
) {
if let Some(params) = node.generic_param_list() {
self.fill_params(lower_ctx, params, add_param_attrs)
@@ -275,7 +277,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
params: ast::GenericParamList,
- mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+ mut add_param_attrs: impl FnMut(
+ Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+ ast::GenericParam,
+ ),
) {
for type_or_const_param in params.type_or_const_params() {
match type_or_const_param {
@@ -297,7 +302,7 @@ impl GenericParams {
type_param.type_bound_list(),
Either::Left(type_ref),
);
- add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
+ add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
}
ast::TypeOrConstParam::Const(const_param) => {
let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@@ -310,7 +315,7 @@ impl GenericParams {
default: ConstRef::from_const_param(lower_ctx, &const_param),
};
let idx = self.type_or_consts.alloc(param.into());
- add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param));
+ add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param));
}
}
}
@@ -325,7 +330,7 @@ impl GenericParams {
lifetime_param.type_bound_list(),
Either::Right(lifetime_ref),
);
- add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param));
+ add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param));
}
}
@@ -433,7 +438,7 @@ impl GenericParams {
let ctx = expander.ctx(db);
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
- exp.1.exit(db, mark);
+ exp.1.exit(mark);
}
}
});
@@ -518,7 +523,7 @@ fn file_id_and_params_of(
(src.file_id, src.value.generic_param_list())
}
// We won't be using this ID anyway
- GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None),
+ GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None),
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
index 75025a984..7fc33abc7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs
@@ -2,10 +2,11 @@
use std::mem;
use hir_expand::name::Name;
-use rustc_parse_format as parse;
+use rustc_dependencies::parse_format as parse;
+use stdx::TupleExt;
use syntax::{
ast::{self, IsString},
- AstToken, SmolStr, TextRange,
+ SmolStr, TextRange, TextSize,
};
use crate::hir::ExprId;
@@ -14,6 +15,7 @@ use crate::hir::ExprId;
pub struct FormatArgs {
pub template: Box<[FormatArgsPiece]>,
pub arguments: FormatArguments,
+ pub orphans: Vec<ExprId>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -170,15 +172,18 @@ pub(crate) fn parse(
mut args: FormatArgumentsCollector,
is_direct_literal: bool,
mut synth: impl FnMut(Name) -> ExprId,
+ mut record_usage: impl FnMut(Name, Option<TextRange>),
) -> FormatArgs {
- let text = s.text();
+ let text = s.text_without_quotes();
let str_style = match s.quote_offsets() {
Some(offsets) => {
let raw = u32::from(offsets.quotes.0.len()) - 1;
- (raw != 0).then_some(raw as usize)
+ // subtract 1 for the `r` prefix
+ (raw != 0).then(|| raw as usize - 1)
}
None => None,
};
+
let mut parser =
parse::Parser::new(text, str_style, fmt_snippet, false, parse::ParseMode::Format);
@@ -193,12 +198,17 @@ pub(crate) fn parse(
let is_source_literal = parser.is_source_literal;
if !parser.errors.is_empty() {
// FIXME: Diagnose
- return FormatArgs { template: Default::default(), arguments: args.finish() };
+ return FormatArgs {
+ template: Default::default(),
+ arguments: args.finish(),
+ orphans: vec![],
+ };
}
let to_span = |inner_span: parse::InnerSpan| {
is_source_literal.then(|| {
TextRange::new(inner_span.start.try_into().unwrap(), inner_span.end.try_into().unwrap())
+ - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
})
};
@@ -230,9 +240,10 @@ pub(crate) fn parse(
Err(index)
}
}
- ArgRef::Name(name, _span) => {
+ ArgRef::Name(name, span) => {
let name = Name::new_text_dont_use(SmolStr::new(name));
if let Some((index, _)) = args.by_name(&name) {
+ record_usage(name, span);
// Name found in `args`, so we resolve it to its index.
if index < args.explicit_args().len() {
// Mark it as used, if it was an explicit argument.
@@ -246,6 +257,7 @@ pub(crate) fn parse(
// disabled (see RFC #2795)
// FIXME: Diagnose
}
+ record_usage(name.clone(), span);
Ok(args.add(FormatArgument {
kind: FormatArgumentKind::Captured(name.clone()),
// FIXME: This is problematic, we might want to synthesize a dummy
@@ -413,7 +425,11 @@ pub(crate) fn parse(
// FIXME: Diagnose
}
- FormatArgs { template: template.into_boxed_slice(), arguments: args.finish() }
+ FormatArgs {
+ template: template.into_boxed_slice(),
+ arguments: args.finish(),
+ orphans: unused.into_iter().map(TupleExt::head).collect(),
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index 44b7f1b4f..26d333f9a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -1,49 +1,49 @@
//! A map of all publicly exported items in a crate.
-use std::collections::hash_map::Entry;
use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId;
-use fst::{self, Streamer};
+use fst::{self, raw::IndexedValue, Streamer};
use hir_expand::name::Name;
use indexmap::IndexMap;
use itertools::Itertools;
-use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
+use rustc_hash::{FxHashSet, FxHasher};
+use smallvec::SmallVec;
+use stdx::format_to;
use triomphe::Arc;
-use crate::item_scope::ImportOrExternCrate;
use crate::{
- db::DefDatabase, item_scope::ItemInNs, nameres::DefMap, visibility::Visibility, AssocItemId,
- ModuleDefId, ModuleId, TraitId,
+ db::DefDatabase,
+ item_scope::{ImportOrExternCrate, ItemInNs},
+ nameres::DefMap,
+ visibility::Visibility,
+ AssocItemId, ModuleDefId, ModuleId, TraitId,
};
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
-// FIXME: Support aliases: an item may be exported under multiple names, so `ImportInfo` should
-// have `Vec<(Name, ModuleId)>` instead of `(Name, ModuleId)`.
/// Item import details stored in the `ImportMap`.
-#[derive(Debug, Clone, Eq, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ImportInfo {
/// A name that can be used to import the item, relative to the crate's root.
pub name: Name,
/// The module containing this item.
pub container: ModuleId,
- /// Whether the import is a trait associated item or not.
- pub is_trait_assoc_item: bool,
/// Whether this item is annotated with `#[doc(hidden)]`.
pub is_doc_hidden: bool,
/// Whether this item is annotated with `#[unstable(..)]`.
pub is_unstable: bool,
}
+type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
+
/// A map from publicly exported items to its name.
///
/// Reexports of items are taken into account, ie. if something is exported under multiple
/// names, the one with the shortest import path will be used.
#[derive(Default)]
pub struct ImportMap {
- map: FxIndexMap<ItemInNs, ImportInfo>,
-
+ map: ImportMapIndex,
/// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
/// values returned by running `fst`.
///
@@ -54,7 +54,25 @@ pub struct ImportMap {
fst: fst::Map<Vec<u8>>,
}
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd)]
+enum IsTraitAssocItem {
+ Yes,
+ No,
+}
+
impl ImportMap {
+ pub fn dump(&self, db: &dyn DefDatabase) -> String {
+ let mut out = String::new();
+ for (k, v) in self.map.iter() {
+ format_to!(out, "{:?} ({:?}) -> ", k, v.1);
+ for v in &v.0 {
+ format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
+ }
+ format_to!(out, "\n");
+ }
+ out
+ }
+
pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("import_map_query");
@@ -63,30 +81,40 @@ impl ImportMap {
let mut importables: Vec<_> = map
.iter()
// We've only collected items, whose name cannot be tuple field.
- .map(|(&item, info)| (item, info.name.as_str().unwrap().to_ascii_lowercase()))
+ .flat_map(|(&item, (info, is_assoc))| {
+ info.iter().map(move |info| {
+ (item, *is_assoc, info.name.as_str().unwrap().to_ascii_lowercase())
+ })
+ })
.collect();
- importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name));
+ importables.sort_by(|(_, l_is_assoc, lhs_name), (_, r_is_assoc, rhs_name)| {
+ lhs_name.cmp(rhs_name).then_with(|| l_is_assoc.cmp(r_is_assoc))
+ });
+ importables.dedup();
// Build the FST, taking care not to insert duplicate values.
let mut builder = fst::MapBuilder::memory();
- let iter = importables.iter().enumerate().dedup_by(|lhs, rhs| lhs.1 .1 == rhs.1 .1);
- for (start_idx, (_, name)) in iter {
+ let iter = importables
+ .iter()
+ .enumerate()
+ .dedup_by(|(_, (_, _, lhs)), (_, (_, _, rhs))| lhs == rhs);
+ for (start_idx, (_, _, name)) in iter {
let _ = builder.insert(name, start_idx as u64);
}
Arc::new(ImportMap {
map,
fst: builder.into_map(),
- importables: importables.into_iter().map(|(item, _)| item).collect(),
+ importables: importables.into_iter().map(|(item, _, _)| item).collect(),
})
}
- pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> {
- self.map.get(&item)
+ pub fn import_info_for(&self, item: ItemInNs) -> Option<&[ImportInfo]> {
+ self.map.get(&item).map(|(info, _)| &**info)
}
}
-fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemInNs, ImportInfo> {
+fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
let _p = profile::span("collect_import_map");
let def_map = db.crate_def_map(krate);
@@ -94,11 +122,13 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
// We look only into modules that are public(ly reexported), starting with the crate root.
let root = def_map.module_id(DefMap::ROOT);
- let mut worklist = vec![(root, 0)];
- // Records items' minimum module depth.
- let mut depth_map = FxHashMap::default();
+ let mut worklist = vec![root];
+ let mut visited = FxHashSet::default();
- while let Some((module, depth)) = worklist.pop() {
+ while let Some(module) = worklist.pop() {
+ if !visited.insert(module) {
+ continue;
+ }
let ext_def_map;
let mod_data = if module.krate == krate {
&def_map[module.local_id]
@@ -126,62 +156,18 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
ItemInNs::Macros(id) => Some(id.into()),
}
};
- let status @ (is_doc_hidden, is_unstable) =
- attr_id.map_or((false, false), |attr_id| {
- let attrs = db.attrs(attr_id);
- (attrs.has_doc_hidden(), attrs.is_unstable())
- });
+ let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| {
+ let attrs = db.attrs(attr_id);
+ (attrs.has_doc_hidden(), attrs.is_unstable())
+ });
let import_info = ImportInfo {
name: name.clone(),
container: module,
- is_trait_assoc_item: false,
is_doc_hidden,
is_unstable,
};
- match depth_map.entry(item) {
- Entry::Vacant(entry) => _ = entry.insert((depth, status)),
- Entry::Occupied(mut entry) => {
- let &(occ_depth, (occ_is_doc_hidden, occ_is_unstable)) = entry.get();
- (depth, occ_depth);
- let overwrite = match (
- is_doc_hidden,
- occ_is_doc_hidden,
- is_unstable,
- occ_is_unstable,
- ) {
- // no change of hiddeness or unstableness
- (true, true, true, true)
- | (true, true, false, false)
- | (false, false, true, true)
- | (false, false, false, false) => depth < occ_depth,
-
- // either less hidden or less unstable, accept
- (true, true, false, true)
- | (false, true, true, true)
- | (false, true, false, true)
- | (false, true, false, false)
- | (false, false, false, true) => true,
- // more hidden or unstable, discard
- (true, true, true, false)
- | (true, false, true, true)
- | (true, false, true, false)
- | (true, false, false, false)
- | (false, false, true, false) => false,
-
- // exchanges doc(hidden) for unstable (and vice-versa),
- (true, false, false, true) | (false, true, true, false) => {
- depth < occ_depth
- }
- };
- if !overwrite {
- continue;
- }
- entry.insert((depth, status));
- }
- }
-
if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
collect_trait_assoc_items(
db,
@@ -192,13 +178,14 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
);
}
- map.insert(item, import_info);
+ let (infos, _) =
+ map.entry(item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::No));
+ infos.reserve_exact(1);
+ infos.push(import_info);
- // If we've just added a module, descend into it. We might traverse modules
- // multiple times, but only if the module depth is smaller (else we `continue`
- // above).
+ // If we've just added a module, descend into it.
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
- worklist.push((mod_id, depth + 1));
+ worklist.push(mod_id);
}
}
}
@@ -209,7 +196,7 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
fn collect_trait_assoc_items(
db: &dyn DefDatabase,
- map: &mut FxIndexMap<ItemInNs, ImportInfo>,
+ map: &mut ImportMapIndex,
tr: TraitId,
is_type_in_ns: bool,
trait_import_info: &ImportInfo,
@@ -236,11 +223,14 @@ fn collect_trait_assoc_items(
let assoc_item_info = ImportInfo {
container: trait_import_info.container,
name: assoc_item_name.clone(),
- is_trait_assoc_item: true,
is_doc_hidden: attrs.has_doc_hidden(),
is_unstable: attrs.is_unstable(),
};
- map.insert(assoc_item, assoc_item_info);
+
+ let (infos, _) =
+ map.entry(assoc_item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::Yes));
+ infos.reserve_exact(1);
+ infos.push(assoc_item_info);
}
}
@@ -258,10 +248,13 @@ impl fmt::Debug for ImportMap {
let mut importable_names: Vec<_> = self
.map
.iter()
- .map(|(item, _)| match item {
- ItemInNs::Types(it) => format!("- {it:?} (t)",),
- ItemInNs::Values(it) => format!("- {it:?} (v)",),
- ItemInNs::Macros(it) => format!("- {it:?} (m)",),
+ .map(|(item, (infos, _))| {
+ let l = infos.len();
+ match item {
+ ItemInNs::Types(it) => format!("- {it:?} (t) [{l}]",),
+ ItemInNs::Values(it) => format!("- {it:?} (v) [{l}]",),
+ ItemInNs::Macros(it) => format!("- {it:?} (m) [{l}]",),
+ }
})
.collect();
@@ -271,13 +264,15 @@ impl fmt::Debug for ImportMap {
}
/// A way to match import map contents against the search query.
-#[derive(Debug)]
+#[derive(Copy, Clone, Debug)]
enum SearchMode {
/// Import map entry should strictly match the query string.
Exact,
/// Import map entry should contain all letters from the query string,
/// in the same order, but not necessary adjacent.
Fuzzy,
+ /// Import map entry should match the query string by prefix.
+ Prefix,
}
/// Three possible ways to search for the name in associated and/or other items.
@@ -319,6 +314,14 @@ impl Query {
Self { search_mode: SearchMode::Fuzzy, ..self }
}
+ pub fn prefix(self) -> Self {
+ Self { search_mode: SearchMode::Prefix, ..self }
+ }
+
+ pub fn exact(self) -> Self {
+ Self { search_mode: SearchMode::Exact, ..self }
+ }
+
/// Specifies whether we want to include associated items in the result.
pub fn assoc_search_mode(self, assoc_mode: AssocSearchMode) -> Self {
Self { assoc_mode, ..self }
@@ -334,33 +337,39 @@ impl Query {
Self { case_sensitive: true, ..self }
}
- fn import_matches(
- &self,
- db: &dyn DefDatabase,
- import: &ImportInfo,
- enforce_lowercase: bool,
- ) -> bool {
- let _p = profile::span("import_map::Query::import_matches");
- match (import.is_trait_assoc_item, self.assoc_mode) {
- (true, AssocSearchMode::Exclude) => return false,
- (false, AssocSearchMode::AssocItemsOnly) => return false,
- _ => {}
+ fn matches_assoc_mode(&self, is_trait_assoc_item: IsTraitAssocItem) -> bool {
+ match (is_trait_assoc_item, self.assoc_mode) {
+ (IsTraitAssocItem::Yes, AssocSearchMode::Exclude)
+ | (IsTraitAssocItem::No, AssocSearchMode::AssocItemsOnly) => false,
+ _ => true,
}
+ }
- let mut input = import.name.display(db.upcast()).to_string();
+ /// Checks whether the import map entry matches the query.
+ fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool {
+ let _p = profile::span("import_map::Query::import_matches");
+
+ // FIXME: Can we get rid of the alloc here?
+ let input = import.name.to_smol_str();
+ let mut _s_slot;
let case_insensitive = enforce_lowercase || !self.case_sensitive;
- if case_insensitive {
- input.make_ascii_lowercase();
- }
+ let input = if case_insensitive {
+ _s_slot = String::from(input);
+ _s_slot.make_ascii_lowercase();
+ &*_s_slot
+ } else {
+ &*input
+ };
let query_string = if case_insensitive { &self.lowercased } else { &self.query };
match self.search_mode {
- SearchMode::Exact => &input == query_string,
+ SearchMode::Exact => input == *query_string,
+ SearchMode::Prefix => input.starts_with(query_string),
SearchMode::Fuzzy => {
let mut input_chars = input.chars();
for query_char in query_string.chars() {
- if input_chars.find(|&it| it == query_char).is_none() {
+ if !input_chars.any(|it| it == query_char) {
return false;
}
}
@@ -376,11 +385,12 @@ impl Query {
pub fn search_dependencies(
db: &dyn DefDatabase,
krate: CrateId,
- query: Query,
+ ref query: Query,
) -> FxHashSet<ItemInNs> {
let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));
let graph = db.crate_graph();
+
let import_maps: Vec<_> =
graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();
@@ -394,31 +404,57 @@ pub fn search_dependencies(
let mut stream = op.union();
let mut res = FxHashSet::default();
+ let mut common_importable_data_scratch = vec![];
+    // FIXME: Improve this, it's rather unreadable and does a duplicate amount of work
while let Some((_, indexed_values)) = stream.next() {
- for indexed_value in indexed_values {
- let import_map = &import_maps[indexed_value.index];
- let importables = &import_map.importables[indexed_value.value as usize..];
-
- let common_importable_data = &import_map.map[&importables[0]];
- if !query.import_matches(db, common_importable_data, true) {
+ for &IndexedValue { index, value } in indexed_values {
+ let import_map = &import_maps[index];
+ let importables @ [importable, ..] = &import_map.importables[value as usize..] else {
+ continue;
+ };
+ let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable];
+ if !query.matches_assoc_mode(is_trait_assoc_item) {
continue;
}
- // Name shared by the importable items in this group.
- let common_importable_name =
- common_importable_data.name.to_smol_str().to_ascii_lowercase();
- // Add the items from this name group. Those are all subsequent items in
- // `importables` whose name match `common_importable_name`.
- let iter = importables
- .iter()
- .copied()
- .take_while(|item| {
- common_importable_name
- == import_map.map[item].name.to_smol_str().to_ascii_lowercase()
- })
- .filter(|item| {
- !query.case_sensitive // we've already checked the common importables name case-insensitively
- || query.import_matches(db, &import_map.map[item], false)
+ // Fetch all the known names of this importable item (to handle import aliases/renames)
+ common_importable_data_scratch.extend(
+ importable_data
+ .iter()
+ .filter(|&info| query.import_matches(info, true))
+ // Name shared by the importable items in this group.
+ .map(|info| info.name.to_smol_str()),
+ );
+ if common_importable_data_scratch.is_empty() {
+ continue;
+ }
+ common_importable_data_scratch.sort();
+ common_importable_data_scratch.dedup();
+
+ let iter =
+ common_importable_data_scratch.drain(..).flat_map(|common_importable_name| {
+ // Add the items from this name group. Those are all subsequent items in
+ // `importables` whose name match `common_importable_name`.
+
+ importables
+ .iter()
+ .copied()
+ .take_while(move |item| {
+ let &(ref import_infos, assoc_mode) = &import_map.map[item];
+ query.matches_assoc_mode(assoc_mode)
+ && import_infos.iter().any(|info| {
+ info.name
+ .to_smol_str()
+ .eq_ignore_ascii_case(&common_importable_name)
+ })
+ })
+ .filter(move |item| {
+ !query.case_sensitive || {
+ // we've already checked the common importables name case-insensitively
+ let &(ref import_infos, _) = &import_map.map[item];
+ import_infos.iter().any(|info| query.import_matches(info, false))
+ }
+ })
});
res.extend(iter);
@@ -445,6 +481,7 @@ mod tests {
let mut importable_paths: Vec<_> = self
.map
.iter()
+ .flat_map(|(item, (info, _))| info.iter().map(move |info| (item, info)))
.map(|(item, info)| {
let path = render_path(db, info);
let ns = match item {
@@ -483,7 +520,7 @@ mod tests {
let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
Some(assoc_item_path) => (assoc_item_path, "a"),
None => (
- render_path(&db, dependency_imports.import_info_for(dependency)?),
+ render_path(&db, &dependency_imports.import_info_for(dependency)?[0]),
match dependency {
ItemInNs::Types(ModuleDefId::FunctionId(_))
| ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
@@ -531,7 +568,12 @@ mod tests {
.items
.iter()
.find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?;
- Some(format!("{}::{}", render_path(db, trait_info), assoc_item_name.display(db.upcast())))
+ // FIXME: This should check all import infos, not just the first
+ Some(format!(
+ "{}::{}",
+ render_path(db, &trait_info[0]),
+ assoc_item_name.display(db.upcast())
+ ))
}
fn check(ra_fixture: &str, expect: Expect) {
@@ -607,6 +649,7 @@ mod tests {
main:
- publ1 (t)
- real_pu2 (t)
+ - real_pu2::Pub (t)
- real_pub (t)
- real_pub::Pub (t)
"#]],
@@ -632,6 +675,7 @@ mod tests {
- sub (t)
- sub::Def (t)
- sub::subsub (t)
+ - sub::subsub::Def (t)
"#]],
);
}
@@ -731,7 +775,9 @@ mod tests {
- module (t)
- module::S (t)
- module::S (v)
+ - module::module (t)
- sub (t)
+ - sub::module (t)
"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
index 7c11fb9d1..ce83cb435 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -112,6 +112,7 @@ pub struct ItemScope {
#[derive(Debug, PartialEq, Eq)]
struct DeriveMacroInvocation {
attr_id: AttrId,
+ /// The `#[derive]` call
attr_call_id: MacroCallId,
derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
}
@@ -401,6 +402,14 @@ impl ItemScope {
})
}
+ pub fn derive_macro_invoc(
+ &self,
+ ast_id: AstId<ast::Adt>,
+ attr_id: AttrId,
+ ) -> Option<MacroCallId> {
+ Some(self.derive_macros.get(&ast_id)?.iter().find(|it| it.attr_id == attr_id)?.attr_call_id)
+ }
+
// FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index 4c812b62a..3d2cddffa 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -38,17 +38,15 @@ mod tests;
use std::{
fmt::{self, Debug},
hash::{Hash, Hasher},
- marker::PhantomData,
ops::Index,
};
use ast::{AstNode, HasName, StructKind};
-use base_db::CrateId;
+use base_db::{span::SyntaxContextId, CrateId};
use either::Either;
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::RawAttrs,
- hygiene::Hygiene,
name::{name, AsName, Name},
ExpandTo, HirFileId, InFile,
};
@@ -108,18 +106,13 @@ impl ItemTree {
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
let syntax = db.parse_or_expand(file_id);
- if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax)
- {
- // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
- return Default::default();
- }
let ctx = lower::Ctx::new(db, file_id);
let mut top_attrs = None;
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
- top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene()));
+ top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map()));
ctx.lower_module_items(&file)
},
ast::MacroItems(items) => {
@@ -131,6 +124,9 @@ impl ItemTree {
ctx.lower_macro_stmts(stmts)
},
_ => {
+ if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) {
+ return Default::default();
+ }
panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}");
},
}
@@ -340,34 +336,37 @@ pub trait ItemTreeNode: Clone {
fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem;
}
-pub struct FileItemTreeId<N: ItemTreeNode> {
- index: Idx<N>,
- _p: PhantomData<N>,
+pub struct FileItemTreeId<N: ItemTreeNode>(Idx<N>);
+
+impl<N: ItemTreeNode> FileItemTreeId<N> {
+ pub fn index(&self) -> Idx<N> {
+ self.0
+ }
}
impl<N: ItemTreeNode> Clone for FileItemTreeId<N> {
fn clone(&self) -> Self {
- Self { index: self.index, _p: PhantomData }
+ Self(self.0)
}
}
impl<N: ItemTreeNode> Copy for FileItemTreeId<N> {}
impl<N: ItemTreeNode> PartialEq for FileItemTreeId<N> {
fn eq(&self, other: &FileItemTreeId<N>) -> bool {
- self.index == other.index
+ self.0 == other.0
}
}
impl<N: ItemTreeNode> Eq for FileItemTreeId<N> {}
impl<N: ItemTreeNode> Hash for FileItemTreeId<N> {
fn hash<H: Hasher>(&self, state: &mut H) {
- self.index.hash(state)
+ self.0.hash(state)
}
}
impl<N: ItemTreeNode> fmt::Debug for FileItemTreeId<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.index.fmt(f)
+ self.0.fmt(f)
}
}
@@ -548,7 +547,7 @@ impl Index<RawVisibilityId> for ItemTree {
impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
type Output = N;
fn index(&self, id: FileItemTreeId<N>) -> &N {
- N::lookup(self, id.index)
+ N::lookup(self, id.index())
}
}
@@ -613,10 +612,17 @@ pub struct Function {
pub(crate) flags: FnFlags,
}
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub enum Param {
- Normal(Interned<TypeRef>),
- Varargs,
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Param {
+ /// This is [`None`] for varargs
+ pub type_ref: Option<Interned<TypeRef>>,
+ pub ast_id: ParamAstId,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ParamAstId {
+ Param(FileAstId<ast::Param>),
+ SelfParam(FileAstId<ast::SelfParam>),
}
bitflags::bitflags! {
@@ -702,6 +708,7 @@ pub struct Impl {
pub target_trait: Option<Interned<TraitRef>>,
pub self_ty: Interned<TypeRef>,
pub is_negative: bool,
+ pub is_unsafe: bool,
pub items: Box<[AssocItem]>,
pub ast_id: FileAstId<ast::Impl>,
}
@@ -739,6 +746,7 @@ pub struct MacroCall {
pub path: Interned<ModPath>,
pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo,
+ pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
@@ -768,9 +776,9 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
- let hygiene = Hygiene::new(db.upcast(), file_id);
- let (_, source_map) =
- lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
+ let span_map = db.span_map(file_id);
+ let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
+ .expect("failed to lower use tree");
source_map[index].clone()
}
/// Maps a `UseTree` contained in this import back to its AST node.
@@ -783,8 +791,10 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
- let hygiene = Hygiene::new(db.upcast(), file_id);
- lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1
+ let span_map = db.span_map(file_id);
+ lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
+ .expect("failed to lower use tree")
+ .1
}
}
@@ -917,23 +927,23 @@ impl ModItem {
pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
match self {
- ModItem::Use(it) => tree[it.index].ast_id().upcast(),
- ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
- ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
- ModItem::Function(it) => tree[it.index].ast_id().upcast(),
- ModItem::Struct(it) => tree[it.index].ast_id().upcast(),
- ModItem::Union(it) => tree[it.index].ast_id().upcast(),
- ModItem::Enum(it) => tree[it.index].ast_id().upcast(),
- ModItem::Const(it) => tree[it.index].ast_id().upcast(),
- ModItem::Static(it) => tree[it.index].ast_id().upcast(),
- ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
- ModItem::TraitAlias(it) => tree[it.index].ast_id().upcast(),
- ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
- ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
- ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
- ModItem::MacroCall(it) => tree[it.index].ast_id().upcast(),
- ModItem::MacroRules(it) => tree[it.index].ast_id().upcast(),
- ModItem::MacroDef(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Use(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::ExternCrate(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::ExternBlock(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Function(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Struct(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Union(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Enum(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Const(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Static(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Trait(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::TraitAlias(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Impl(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::TypeAlias(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::Mod(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(),
+ ModItem::MacroDef(it) => tree[it.index()].ast_id().upcast(),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index e4702c113..83a2790ce 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -2,18 +2,19 @@
use std::collections::hash_map::Entry;
-use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
+use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId};
use syntax::ast::{self, HasModuleItem, HasTypeBounds};
use crate::{
generics::{GenericParams, TypeParamData, TypeParamProvenance},
type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
+ LocalLifetimeParamId, LocalTypeOrConstParamId,
};
use super::*;
fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> {
- FileItemTreeId { index, _p: PhantomData }
+ FileItemTreeId(index)
}
pub(super) struct Ctx<'a> {
@@ -33,8 +34,8 @@ impl<'a> Ctx<'a> {
}
}
- pub(super) fn hygiene(&self) -> &Hygiene {
- self.body_ctx.hygiene()
+ pub(super) fn span_map(&self) -> SpanMapRef<'_> {
+ self.body_ctx.span_map()
}
pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
@@ -79,7 +80,7 @@ impl<'a> Ctx<'a> {
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
self.tree
.attrs
- .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene()));
+ .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map()));
self.tree.top_level = block
.statements()
.filter_map(|stmt| match stmt {
@@ -109,8 +110,7 @@ impl<'a> Ctx<'a> {
}
fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
- let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene());
- let item: ModItem = match item {
+ let mod_item: ModItem = match item {
ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
ast::Item::Union(ast) => self.lower_union(ast)?.into(),
ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
@@ -129,10 +129,10 @@ impl<'a> Ctx<'a> {
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
};
+ let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map());
+ self.add_attrs(mod_item.into(), attrs);
- self.add_attrs(item.into(), attrs);
-
- Some(item)
+ Some(mod_item)
}
fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
@@ -146,21 +146,32 @@ impl<'a> Ctx<'a> {
}
}
- fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
- match item {
+ fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option<AssocItem> {
+ let item: AssocItem = match item_node {
ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
- }
+ }?;
+ let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map());
+ self.add_attrs(
+ match item {
+ AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
+ AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)),
+ AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)),
+ AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)),
+ },
+ attrs,
+ );
+ Some(item)
}
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
let visibility = self.lower_visibility(strukt);
let name = strukt.name()?.as_name();
+ let ast_id = self.source_ast_id_map.ast_id(strukt);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
let fields = self.lower_fields(&strukt.kind());
- let ast_id = self.source_ast_id_map.ast_id(strukt);
let res = Struct { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().structs.alloc(res)))
}
@@ -184,7 +195,10 @@ impl<'a> Ctx<'a> {
for field in fields.fields() {
if let Some(data) = self.lower_record_field(&field) {
let idx = self.data().fields.alloc(data);
- self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
+ self.add_attrs(
+ idx.into(),
+ RawAttrs::new(self.db.upcast(), &field, self.span_map()),
+ );
}
}
let end = self.next_field_idx();
@@ -205,7 +219,7 @@ impl<'a> Ctx<'a> {
for (i, field) in fields.fields().enumerate() {
let data = self.lower_tuple_field(i, &field);
let idx = self.data().fields.alloc(data);
- self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
+ self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map()));
}
let end = self.next_field_idx();
IdxRange::new(start..end)
@@ -222,12 +236,12 @@ impl<'a> Ctx<'a> {
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
let visibility = self.lower_visibility(union);
let name = union.name()?.as_name();
+ let ast_id = self.source_ast_id_map.ast_id(union);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
let fields = match union.record_field_list() {
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
};
- let ast_id = self.source_ast_id_map.ast_id(union);
let res = Union { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().unions.alloc(res)))
}
@@ -235,12 +249,12 @@ impl<'a> Ctx<'a> {
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
let visibility = self.lower_visibility(enum_);
let name = enum_.name()?.as_name();
+ let ast_id = self.source_ast_id_map.ast_id(enum_);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
let variants = match &enum_.variant_list() {
Some(variant_list) => self.lower_variants(variant_list),
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
};
- let ast_id = self.source_ast_id_map.ast_id(enum_);
let res = Enum { name, visibility, generic_params, variants, ast_id };
Some(id(self.data().enums.alloc(res)))
}
@@ -252,7 +266,7 @@ impl<'a> Ctx<'a> {
let idx = self.data().variants.alloc(data);
self.add_attrs(
idx.into(),
- RawAttrs::new(self.db.upcast(), &variant, self.hygiene()),
+ RawAttrs::new(self.db.upcast(), &variant, self.span_map()),
);
}
}
@@ -295,24 +309,37 @@ impl<'a> Ctx<'a> {
}
}
};
- let ty = Interned::new(self_type);
- let idx = self.data().params.alloc(Param::Normal(ty));
+ let type_ref = Interned::new(self_type);
+ let ast_id = self.source_ast_id_map.ast_id(&self_param);
+ let idx = self.data().params.alloc(Param {
+ type_ref: Some(type_ref),
+ ast_id: ParamAstId::SelfParam(ast_id),
+ });
self.add_attrs(
idx.into(),
- RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()),
+ RawAttrs::new(self.db.upcast(), &self_param, self.span_map()),
);
has_self_param = true;
}
for param in param_list.params() {
+ let ast_id = self.source_ast_id_map.ast_id(&param);
let idx = match param.dotdotdot_token() {
- Some(_) => self.data().params.alloc(Param::Varargs),
+ Some(_) => self
+ .data()
+ .params
+ .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }),
None => {
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
let ty = Interned::new(type_ref);
- self.data().params.alloc(Param::Normal(ty))
+ self.data()
+ .params
+ .alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) })
}
};
- self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene()));
+ self.add_attrs(
+ idx.into(),
+ RawAttrs::new(self.db.upcast(), &param, self.span_map()),
+ );
}
}
let end_param = self.next_param_idx();
@@ -382,16 +409,9 @@ impl<'a> Ctx<'a> {
let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
let visibility = self.lower_visibility(type_alias);
let bounds = self.lower_type_bounds(type_alias);
- let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias);
- let res = TypeAlias {
- name,
- visibility,
- bounds: bounds.into_boxed_slice(),
- generic_params,
- type_ref,
- ast_id,
- };
+ let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
+ let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id };
Some(id(self.data().type_aliases.alloc(res)))
}
@@ -438,23 +458,17 @@ impl<'a> Ctx<'a> {
fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
let name = trait_def.name()?.as_name();
let visibility = self.lower_visibility(trait_def);
+ let ast_id = self.source_ast_id_map.ast_id(trait_def);
let generic_params =
self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
let is_auto = trait_def.auto_token().is_some();
let is_unsafe = trait_def.unsafe_token().is_some();
- let ast_id = self.source_ast_id_map.ast_id(trait_def);
let items = trait_def
.assoc_item_list()
.into_iter()
.flat_map(|list| list.assoc_items())
- .filter_map(|item| {
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
- self.lower_assoc_item(&item).map(|item| {
- self.add_attrs(ModItem::from(item).into(), attrs);
- item
- })
- })
+ .filter_map(|item_node| self.lower_assoc_item(&item_node))
.collect();
let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
@@ -467,17 +481,18 @@ impl<'a> Ctx<'a> {
) -> Option<FileItemTreeId<TraitAlias>> {
let name = trait_alias_def.name()?.as_name();
let visibility = self.lower_visibility(trait_alias_def);
+ let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let generic_params = self.lower_generic_params(
HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
trait_alias_def,
);
- let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let alias = TraitAlias { name, visibility, generic_params, ast_id };
Some(id(self.data().trait_aliases.alloc(alias)))
}
fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
+ let ast_id = self.source_ast_id_map.ast_id(impl_def);
// Note that trait impls don't get implicit `Self` unlike traits, because here they are a
// type alias rather than a type parameter, so this is handled by the resolver.
let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
@@ -487,28 +502,24 @@ impl<'a> Ctx<'a> {
let target_trait = impl_def.trait_().and_then(|tr| self.lower_trait_ref(&tr));
let self_ty = self.lower_type_ref(&impl_def.self_ty()?);
let is_negative = impl_def.excl_token().is_some();
+ let is_unsafe = impl_def.unsafe_token().is_some();
// We cannot use `assoc_items()` here as that does not include macro calls.
let items = impl_def
.assoc_item_list()
.into_iter()
.flat_map(|it| it.assoc_items())
- .filter_map(|item| {
- let assoc = self.lower_assoc_item(&item)?;
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
- self.add_attrs(ModItem::from(assoc).into(), attrs);
- Some(assoc)
- })
+ .filter_map(|item| self.lower_assoc_item(&item))
.collect();
- let ast_id = self.source_ast_id_map.ast_id(impl_def);
- let res = Impl { generic_params, target_trait, self_ty, is_negative, items, ast_id };
+ let res =
+ Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id };
Some(id(self.data().impls.alloc(res)))
}
fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item);
- let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
+ let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?;
let res = Use { visibility, ast_id, use_tree };
Some(id(self.data().uses.alloc(res)))
@@ -530,10 +541,16 @@ impl<'a> Ctx<'a> {
}
fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
- let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
+ let span_map = self.span_map();
+ let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?);
let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m);
- let res = MacroCall { path, ast_id, expand_to };
+ let res = MacroCall {
+ path,
+ ast_id,
+ expand_to,
+ call_site: span_map.span_for_range(m.syntax().text_range()).ctx,
+ };
Some(id(self.data().macro_calls.alloc(res)))
}
@@ -565,15 +582,15 @@ impl<'a> Ctx<'a> {
// (in other words, the knowledge that they're in an extern block must not be used).
// This is because an extern block can contain macros whose ItemTree's top-level items
// should be considered to be in an extern block too.
- let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
- let id: ModItem = match item {
- ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
- ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
- ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
- ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
+ let mod_item: ModItem = match &item {
+ ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(),
+ ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(),
+ ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
+ ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
- self.add_attrs(id.into(), attrs);
- Some(id)
+ let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map());
+ self.add_attrs(mod_item.into(), attrs);
+ Some(mod_item)
})
.collect()
});
@@ -605,12 +622,16 @@ impl<'a> Ctx<'a> {
generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
}
- let add_param_attrs = |item, param| {
- let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.hygiene());
+ let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
+ param| {
+ let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.span_map());
// This is identical to the body of `Ctx::add_attrs()` but we can't call that here
// because it requires `&mut self` and the call to `generics.fill()` below also
// references `self`.
- match self.tree.attrs.entry(item) {
+ match self.tree.attrs.entry(match item {
+ Either::Right(id) => id.into(),
+ Either::Left(id) => id.into(),
+ }) {
Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs);
}
@@ -625,18 +646,19 @@ impl<'a> Ctx<'a> {
Interned::new(generics)
}
- fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Vec<Interned<TypeBound>> {
+ fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Box<[Interned<TypeBound>]> {
match node.type_bound_list() {
Some(bound_list) => bound_list
.bounds()
.map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it)))
.collect(),
- None => Vec::new(),
+ None => Box::default(),
}
}
fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
- let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
+ let vis =
+ RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map());
self.data().vis.alloc(vis)
}
@@ -714,7 +736,7 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> {
struct UseTreeLowering<'a> {
db: &'a dyn DefDatabase,
- hygiene: &'a Hygiene,
+ span_map: SpanMapRef<'a>,
mapping: Arena<ast::UseTree>,
}
@@ -727,7 +749,7 @@ impl UseTreeLowering<'_> {
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => {
- match ModPath::from_src(self.db.upcast(), path, self.hygiene) {
+ match ModPath::from_src(self.db.upcast(), path, self.span_map) {
Some(it) => Some(it),
None => return None, // FIXME: report errors somewhere
}
@@ -746,7 +768,7 @@ impl UseTreeLowering<'_> {
} else {
let is_glob = tree.star_token().is_some();
let path = match tree.path() {
- Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?),
+ Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?),
None => None,
};
let alias = tree.rename().map(|a| {
@@ -782,10 +804,10 @@ impl UseTreeLowering<'_> {
pub(crate) fn lower_use_tree(
db: &dyn DefDatabase,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
tree: ast::UseTree,
) -> Option<(UseTree, Arena<ast::UseTree>)> {
- let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
+ let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() };
let tree = lowering.lower_use_tree(tree)?;
Some((tree, lowering.mapping))
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index 417bd37c8..244111d20 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -261,15 +261,15 @@ impl Printer<'_> {
self.indented(|this| {
for param in params.clone() {
this.print_attrs_of(param, "\n");
- match &this.tree[param] {
- Param::Normal(ty) => {
+ match &this.tree[param].type_ref {
+ Some(ty) => {
if flags.contains(FnFlags::HAS_SELF_PARAM) {
w!(this, "self: ");
}
this.print_type_ref(ty);
wln!(this, ",");
}
- Param::Varargs => {
+ None => {
wln!(this, "...");
}
};
@@ -388,8 +388,18 @@ impl Printer<'_> {
wln!(self);
}
ModItem::Impl(it) => {
- let Impl { target_trait, self_ty, is_negative, items, generic_params, ast_id: _ } =
- &self.tree[it];
+ let Impl {
+ target_trait,
+ self_ty,
+ is_negative,
+ is_unsafe,
+ items,
+ generic_params,
+ ast_id: _,
+ } = &self.tree[it];
+ if *is_unsafe {
+ w!(self, "unsafe");
+ }
w!(self, "impl");
self.print_generic_params(generic_params);
w!(self, " ");
@@ -447,7 +457,7 @@ impl Printer<'_> {
}
}
ModItem::MacroCall(it) => {
- let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
+ let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it];
wln!(self, "{}!(...);", path.display(self.db.upcast()));
}
ModItem::MacroRules(it) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
index 4180f8172..96c65b941 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
@@ -370,3 +370,15 @@ struct S<#[cfg(never)] T>;
"#]],
)
}
+
+#[test]
+fn pub_self() {
+ check(
+ r#"
+pub(self) struct S;
+ "#,
+ expect![[r#"
+ pub(self) struct S;
+ "#]],
+ )
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index 3f87fe62b..b5333861c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -7,7 +7,8 @@
//! Note that `hir_def` is a work in progress, so not all of the above is
//! actually true.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[allow(unused)]
macro_rules! eprintln {
@@ -48,7 +49,7 @@ pub mod visibility;
pub mod find_path;
pub mod import_map;
-pub use rustc_abi as layout;
+pub use rustc_dependencies::abi as layout;
use triomphe::Arc;
#[cfg(test)]
@@ -62,7 +63,7 @@ use std::{
panic::{RefUnwindSafe, UnwindSafe},
};
-use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
+use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind};
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput},
@@ -71,18 +72,18 @@ use hir_expand::{
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
- hygiene::Hygiene,
+ name::Name,
proc_macro::ProcMacroExpander,
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
- MacroDefId, MacroDefKind, UnresolvedMacro,
+ MacroDefId, MacroDefKind,
};
use item_tree::ExternBlock;
use la_arena::Idx;
use nameres::DefMap;
use stdx::impl_from;
-use syntax::ast;
+use syntax::{ast, AstNode};
-use ::tt::token_id as tt;
+pub use hir_expand::tt;
use crate::{
builtin_type::BuiltinType,
@@ -150,7 +151,7 @@ impl TryFrom<ModuleId> for CrateRootModuleId {
}
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ModuleId {
krate: CrateId,
/// If this `ModuleId` was derived from a `DefMap` for a block expression, this stores the
@@ -173,6 +174,18 @@ impl ModuleId {
self.krate
}
+ pub fn name(self, db: &dyn db::DefDatabase) -> Option<Name> {
+ let def_map = self.def_map(db);
+ let parent = def_map[self.local_id].parent?;
+ def_map[parent].children.iter().find_map(|(name, module_id)| {
+ if *module_id == self.local_id {
+ Some(name.clone())
+ } else {
+ None
+ }
+ })
+ }
+
pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
self.def_map(db).containing_module(self.local_id)
}
@@ -498,10 +511,7 @@ impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId);
impl MacroId {
pub fn is_attribute(self, db: &dyn db::DefDatabase) -> bool {
- match self {
- MacroId::ProcMacroId(it) => it.lookup(db).kind == ProcMacroKind::Attr,
- _ => false,
- }
+ matches!(self, MacroId::ProcMacroId(it) if it.lookup(db).kind == ProcMacroKind::Attr)
}
}
@@ -559,6 +569,8 @@ pub struct ConstBlockLoc {
pub root: hir::ExprId,
}
+/// Something that holds types, required for the current const arg lowering implementation as they
+/// need to be able to query where they are defined.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum TypeOwnerId {
FunctionId(FunctionId),
@@ -571,9 +583,6 @@ pub enum TypeOwnerId {
TypeAliasId(TypeAliasId),
ImplId(ImplId),
EnumVariantId(EnumVariantId),
- // FIXME(const-generic-body): ModuleId should not be a type owner. This needs to be fixed to make `TypeOwnerId` actually
- // useful for assigning ids to in type consts.
- ModuleId(ModuleId),
}
impl TypeOwnerId {
@@ -587,9 +596,7 @@ impl TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
- TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
- return None
- }
+ TypeOwnerId::InTypeConstId(_) | TypeOwnerId::StaticId(_) => return None,
})
}
}
@@ -604,8 +611,7 @@ impl_from!(
TraitAliasId,
TypeAliasId,
ImplId,
- EnumVariantId,
- ModuleId
+ EnumVariantId
for TypeOwnerId
);
@@ -703,12 +709,15 @@ pub struct InTypeConstLoc {
pub id: AstId<ast::ConstArg>,
/// The thing this const arg appears in
pub owner: TypeOwnerId,
- pub thing: Box<dyn OpaqueInternableThing>,
+ // FIXME(const-generic-body): The expected type should not be
+ pub expected_ty: Box<dyn OpaqueInternableThing>,
}
impl PartialEq for InTypeConstLoc {
fn eq(&self, other: &Self) -> bool {
- self.id == other.id && self.owner == other.owner && &*self.thing == &*other.thing
+ self.id == other.id
+ && self.owner == other.owner
+ && &*self.expected_ty == &*other.expected_ty
}
}
@@ -1031,7 +1040,6 @@ impl HasModule for TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db),
TypeOwnerId::ImplId(it) => it.lookup(db).container,
TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container,
- TypeOwnerId::ModuleId(it) => *it,
}
}
}
@@ -1155,16 +1163,20 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
- let h = Hygiene::new(db, self.file_id);
- let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h));
+ let span_map = db.span_map(self.file_id);
+ let path =
+ self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref()));
let Some(path) = path else {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};
+ let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx;
+
macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
+ call_site,
expands_to,
krate,
resolver,
@@ -1189,17 +1201,19 @@ impl<T: AstIdNode> AstIdWithPath<T> {
fn macro_call_as_call_id(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
+ call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
- macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
+ macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
.map(|res| res.value)
}
fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
+ call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@@ -1211,7 +1225,7 @@ fn macro_call_as_call_id_with_eager(
let res = match def.kind {
MacroDefKind::BuiltInEager(..) => {
let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
- expand_eager_macro_input(db, krate, macro_call, def, &|path| {
+ expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| {
eager_resolver(path).filter(MacroDefId::is_fn_like)
})
}
@@ -1220,6 +1234,7 @@ fn macro_call_as_call_id_with_eager(
db,
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
+ call_site,
)),
err: None,
},
@@ -1304,6 +1319,7 @@ fn derive_macro_as_call_id(
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
+ call_site: SyntaxContextId,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@@ -1318,6 +1334,7 @@ fn derive_macro_as_call_id(
derive_index: derive_pos,
derive_attr_index,
},
+ call_site,
);
Ok((macro_id, def_id, call_id))
}
@@ -1330,15 +1347,13 @@ fn attr_macro_as_call_id(
def: MacroDefId,
) -> MacroCallId {
let arg = match macro_attr.input.as_deref() {
- Some(AttrInput::TokenTree(tt)) => (
- {
- let mut tt = tt.0.clone();
- tt.delimiter = tt::Delimiter::UNSPECIFIED;
- tt
- },
- tt.1.clone(),
- ),
- _ => (tt::Subtree::empty(), Default::default()),
+ Some(AttrInput::TokenTree(tt)) => {
+ let mut tt = tt.as_ref().clone();
+ tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+ Some(tt)
+ }
+
+ _ => None,
};
def.as_lazy_macro(
@@ -1346,11 +1361,18 @@ fn attr_macro_as_call_id(
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
- attr_args: Arc::new(arg),
+ attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
},
+ macro_attr.ctxt,
)
}
+
+#[derive(Debug)]
+pub struct UnresolvedMacro {
+ pub path: hir_expand::mod_path::ModPath,
+}
+
intern::impl_internable!(
crate::type_ref::TypeRef,
crate::type_ref::TraitRef,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
index 52781d988..a3505b65f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
@@ -3,7 +3,7 @@ use std::cell::OnceCell;
use hir_expand::{
ast_id_map::{AstIdMap, AstIdNode},
- hygiene::Hygiene,
+ span::{SpanMap, SpanMapRef},
AstId, HirFileId, InFile,
};
use syntax::ast;
@@ -13,33 +13,34 @@ use crate::{db::DefDatabase, path::Path};
pub struct LowerCtx<'a> {
pub db: &'a dyn DefDatabase,
- hygiene: Hygiene,
+ span_map: SpanMap,
+ // FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways.
ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
}
impl<'a> LowerCtx<'a> {
- pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self {
- LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) }
+ pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self {
+ LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) }
}
pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
LowerCtx {
db,
- hygiene: Hygiene::new(db.upcast(), file_id),
+ span_map: db.span_map(file_id),
ast_id_map: Some((file_id, OnceCell::new())),
}
}
- pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self {
- LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None }
+ pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self {
+ LowerCtx { db, span_map, ast_id_map: None }
}
- pub(crate) fn hygiene(&self) -> &Hygiene {
- &self.hygiene
+ pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
+ self.span_map.as_ref()
}
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
- Path::from_src(ast, self)
+ Path::from_src(self, ast)
}
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 4aedb22c6..514219ee7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -17,7 +17,7 @@ fn main() { column!(); }
#[rustc_builtin_macro]
macro_rules! column {() => {}}
-fn main() { 0 as u32; }
+fn main() { 0u32; }
"#]],
);
}
@@ -74,7 +74,7 @@ fn main() { line!() }
#[rustc_builtin_macro]
macro_rules! line {() => {}}
-fn main() { 0 as u32 }
+fn main() { 0u32 }
"#]],
);
}
@@ -468,12 +468,12 @@ macro_rules! concat_bytes {}
fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); }
"##,
- expect![[r##"
+ expect![[r#"
#[rustc_builtin_macro]
macro_rules! concat_bytes {}
fn main() { [b'A', 66, 67, 68, b'E', 70]; }
-"##]],
+"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index d09062132..9bf2a50d5 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check;
fn token_mapping_smoke_test() {
check(
r#"
-// +tokenids
macro_rules! f {
( struct $ident:ident ) => {
struct $ident {
@@ -24,26 +23,22 @@ macro_rules! f {
};
}
-// +tokenids
+// +spans+syntaxctxt
f!(struct MyTraitMap2);
"#,
- expect![[r##"
-// call ids will be shifted by Shift(30)
-// +tokenids
-macro_rules! f {#0
- (#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9
- struct#10 $#11ident#12 {#13
- map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28
- }#13
- }#9;#29
-}#0
-
-// // +tokenids
-// f!(struct#1 MyTraitMap2#2);
-struct#10 MyTraitMap2#32 {#13
- map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28
-}#13
-"##]],
+ expect![[r#"
+macro_rules! f {
+ ( struct $ident:ident ) => {
+ struct $ident {
+ map: ::std::collections::HashSet<()>,
+ }
+ };
+}
+
+struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2#
+ map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@91..92\2#std#FileId(0):1@93..96\2#::#FileId(0):1@96..97\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@109..110\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2#
+}#FileId(0):1@132..133\2#
+"#]],
);
}
@@ -53,49 +48,42 @@ fn token_mapping_floats() {
// (and related issues)
check(
r#"
-// +tokenids
+// +spans+syntaxctxt
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
-// +tokenids
+// +spans+syntaxctxt
f! {
fn main() {
1;
1.0;
+ ((1,),).0.0;
let x = 1;
}
}
"#,
- expect![[r##"
-// call ids will be shifted by Shift(18)
-// +tokenids
-macro_rules! f {#0
- (#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11
- $#12(#13$#14tt#15)#13*#16
- }#11;#17
-}#0
-
-// // +tokenids
-// f! {
-// fn#1 main#2() {
-// 1#5;#6
-// 1.0#7;#8
-// let#9 x#10 =#11 1#12;#13
-// }
-// }
-fn#19 main#20(#21)#21 {#22
- 1#23;#24
- 1.0#25;#26
- let#27 x#28 =#29 1#30;#31
-}#22
+ expect![[r#"
+// +spans+syntaxctxt
+macro_rules! f {
+ ($($tt:tt)*) => {
+ $($tt)*
+ };
+}
+fn#FileId(0):2@30..32\0# main#FileId(0):2@33..37\0#(#FileId(0):2@37..38\0#)#FileId(0):2@38..39\0# {#FileId(0):2@40..41\0#
+ 1#FileId(0):2@50..51\0#;#FileId(0):2@51..52\0#
+ 1.0#FileId(0):2@61..64\0#;#FileId(0):2@64..65\0#
+ (#FileId(0):2@74..75\0#(#FileId(0):2@75..76\0#1#FileId(0):2@76..77\0#,#FileId(0):2@77..78\0# )#FileId(0):2@78..79\0#,#FileId(0):2@79..80\0# )#FileId(0):2@80..81\0#.#FileId(0):2@81..82\0#0#FileId(0):2@82..85\0#.#FileId(0):2@82..85\0#0#FileId(0):2@82..85\0#;#FileId(0):2@85..86\0#
+ let#FileId(0):2@95..98\0# x#FileId(0):2@99..100\0# =#FileId(0):2@101..102\0# 1#FileId(0):2@103..104\0#;#FileId(0):2@104..105\0#
+}#FileId(0):2@110..111\0#
-"##]],
+
+"#]],
);
}
@@ -105,53 +93,86 @@ fn eager_expands_with_unresolved_within() {
r#"
#[rustc_builtin_macro]
#[macro_export]
-macro_rules! format_args {}
+macro_rules! concat {}
+macro_rules! identity {
+ ($tt:tt) => {
+ $tt
+ }
+}
fn main(foo: ()) {
- format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+ concat!("hello", identity!("world"), unresolved!(), identity!("!"));
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
-macro_rules! format_args {}
+macro_rules! concat {}
+macro_rules! identity {
+ ($tt:tt) => {
+ $tt
+ }
+}
fn main(foo: ()) {
- builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+ /* error: unresolved macro unresolved */"helloworld!";
}
"##]],
);
}
#[test]
-fn token_mapping_eager() {
+fn concat_spans() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
-macro_rules! format_args {}
-
+macro_rules! concat {}
macro_rules! identity {
- ($expr:expr) => { $expr };
+ ($tt:tt) => {
+ $tt
+ }
}
fn main(foo: ()) {
- format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+ #[rustc_builtin_macro]
+ #[macro_export]
+ macro_rules! concat {}
+ macro_rules! identity {
+ ($tt:tt) => {
+ $tt
+ }
+ }
+
+ fn main(foo: ()) {
+ concat/*+spans+syntaxctxt*/!("hello", concat!("w", identity!("o")), identity!("rld"), unresolved!(), identity!("!"));
+ }
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
-macro_rules! format_args {}
-
+macro_rules! concat {}
macro_rules! identity {
- ($expr:expr) => { $expr };
+ ($tt:tt) => {
+ $tt
+ }
}
fn main(foo: ()) {
- // format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17)
-builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0
+ #[rustc_builtin_macro]
+ #[macro_export]
+ macro_rules! concat {}
+ macro_rules! identity {
+ ($tt:tt) => {
+ $tt
+ }
+ }
+
+ fn main(foo: ()) {
+ /* error: unresolved macro unresolved */"helloworld!"#FileId(0):3@207..323\6#;
+ }
}
"##]],
@@ -159,6 +180,29 @@ builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_
}
#[test]
+fn token_mapping_across_files() {
+ check(
+ r#"
+//- /lib.rs
+#[macro_use]
+mod foo;
+
+mk_struct/*+spans+syntaxctxt*/!(Foo with u32);
+//- /foo.rs
+macro_rules! mk_struct {
+ ($foo:ident with $ty:ty) => { struct $foo($ty); }
+}
+"#,
+ expect![[r#"
+#[macro_use]
+mod foo;
+
+struct#FileId(1):1@59..65\2# Foo#FileId(0):2@32..35\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@41..44\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2#
+"#]],
+ );
+}
+
+#[test]
fn float_field_access_macro_input() {
check(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index b416f45ff..71ba49721 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -13,37 +13,97 @@ fn test_vec() {
check(
r#"
macro_rules! vec {
- ($($item:expr),*) => {{
- let mut v = Vec::new();
- $( v.push($item); )*
- v
- }};
+ () => (
+ $crate::__rust_force_expr!($crate::vec::Vec::new())
+ );
+ ($elem:expr; $n:expr) => (
+ $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+ );
+ ($($x:expr),+ $(,)?) => (
+ $crate::__rust_force_expr!(<[_]>::into_vec(
+ // This rustc_box is not required, but it produces a dramatic improvement in compile
+ // time when constructing arrays with many elements.
+ #[rustc_box]
+ $crate::boxed::Box::new([$($x),+])
+ ))
+ );
+}
+
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
}
+
fn main() {
vec!();
vec![1u32,2];
+ vec![a.];
}
"#,
expect![[r#"
macro_rules! vec {
- ($($item:expr),*) => {{
- let mut v = Vec::new();
- $( v.push($item); )*
- v
- }};
+ () => (
+ $crate::__rust_force_expr!($crate::vec::Vec::new())
+ );
+ ($elem:expr; $n:expr) => (
+ $crate::__rust_force_expr!($crate::vec::from_elem($elem, $n))
+ );
+ ($($x:expr),+ $(,)?) => (
+ $crate::__rust_force_expr!(<[_]>::into_vec(
+ // This rustc_box is not required, but it produces a dramatic improvement in compile
+ // time when constructing arrays with many elements.
+ #[rustc_box]
+ $crate::boxed::Box::new([$($x),+])
+ ))
+ );
}
+
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
+ };
+}
+
fn main() {
- {
- let mut v = Vec::new();
- v
+ $crate::__rust_force_expr!($crate:: vec:: Vec:: new());
+ $crate::__rust_force_expr!(<[_]>:: into_vec(#[rustc_box]$crate:: boxed:: Box:: new([1u32, 2])));
+ /* error: expected Expr */$crate::__rust_force_expr!($crate:: vec:: from_elem((a.), $n));
+}
+"#]],
+ );
+ // FIXME we should have testing infra for multi-level expansion tests
+ check(
+ r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
};
- {
- let mut v = Vec::new();
- v.push(1u32);
- v.push(2);
- v
+}
+
+fn main() {
+ __rust_force_expr!(crate:: vec:: Vec:: new());
+ __rust_force_expr!(<[_]>:: into_vec(#[rustc_box] crate:: boxed:: Box:: new([1u32, 2])));
+ __rust_force_expr/*+errors*/!(crate:: vec:: from_elem((a.), $n));
+}
+"#,
+ expect![[r#"
+macro_rules! __rust_force_expr {
+ ($e:expr) => {
+ $e
};
}
+
+fn main() {
+ (crate ::vec::Vec::new());
+ (<[_]>::into_vec(#[rustc_box] crate ::boxed::Box::new([1u32, 2])));
+ /* error: expected Expr *//* parse error: expected field name or number */
+/* parse error: expected expression */
+/* parse error: expected R_PAREN */
+/* parse error: expected COMMA */
+/* parse error: expected expression, item or let statement */
+(crate ::vec::from_elem((a.), $n));
+}
"#]],
);
}
@@ -970,3 +1030,63 @@ builtin #format_args ("{}", &[0 2]);
"##]],
);
}
+
+#[test]
+fn eager_concat_line() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat {}
+
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! line {}
+
+fn main() {
+ concat!("event ", line!());
+}
+
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat {}
+
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! line {}
+
+fn main() {
+ "event 0u32";
+}
+
+"##]],
+ );
+}
+
+#[test]
+fn eager_concat_bytes_panic() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat_bytes {}
+
+fn main() {
+ let x = concat_bytes!(2);
+}
+
+"#,
+ expect![[r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat_bytes {}
+
+fn main() {
+ let x = /* error: unexpected token in input */[];
+}
+
+"#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index 8adced4e0..be2a503d8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -16,21 +16,16 @@ mod proc_macros;
use std::{iter, ops::Range, sync};
-use ::mbe::TokenMap;
-use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
+use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
use expect_test::Expect;
-use hir_expand::{
- db::{DeclarativeMacroExpander, ExpandDatabase},
- AstId, InFile, MacroFile,
-};
+use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt};
use stdx::format_to;
use syntax::{
ast::{self, edit::IndentLevel},
- AstNode, SyntaxElement,
- SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
- SyntaxNode, TextRange, T,
+ AstNode,
+ SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
+ SyntaxNode, T,
};
-use tt::token_id::{Subtree, TokenId};
use crate::{
db::DefDatabase,
@@ -39,6 +34,7 @@ use crate::{
resolver::HasResolver,
src::HasSource,
test_db::TestDB,
+ tt::Subtree,
AdtId, AsMacroCall, Lookup, ModuleDefId,
};
@@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let mut text_edits = Vec::new();
let mut expansions = Vec::new();
- for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
- let mut show_token_ids = false;
- for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
- show_token_ids |= comment.to_string().contains("+tokenids");
- }
- if !show_token_ids {
- continue;
- }
-
- let call_offset = macro_.syntax().text_range().start().into();
- let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
- let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
-
- let DeclarativeMacroExpander { mac, def_site_token_map } =
- &*db.decl_macro_expander(krate, ast_id);
- assert_eq!(mac.err(), None);
- let tt = match &macro_ {
- ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
- ast::Macro::MacroDef(_) => unimplemented!(""),
- };
-
- let tt_start = tt.syntax().text_range().start();
- tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
- |token| {
- let range = token.text_range().checked_sub(tt_start).unwrap();
- if let Some(id) = def_site_token_map.token_by_range(range) {
- let offset = (range.end() + tt_start).into();
- text_edits.push((offset..offset, format!("#{}", id.0)));
- }
- },
- );
- text_edits.push((
- call_offset..call_offset,
- format!("// call ids will be shifted by {:?}\n", mac.shift()),
- ));
- }
-
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
let macro_call = InFile::new(source.file_id, &macro_call);
let res = macro_call
@@ -135,20 +94,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
})
.unwrap();
let macro_call_id = res.value.unwrap();
- let macro_file = MacroFile { macro_call_id };
+ let macro_file = MacroFileId { macro_call_id };
let mut expansion_result = db.parse_macro_expansion(macro_file);
expansion_result.err = expansion_result.err.or(res.err);
- expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
+ expansions.push((macro_call.value.clone(), expansion_result));
}
- for (call, exp, arg) in expansions.into_iter().rev() {
+ for (call, exp) in expansions.into_iter().rev() {
let mut tree = false;
let mut expect_errors = false;
- let mut show_token_ids = false;
+ let mut show_spans = false;
+ let mut show_ctxt = false;
for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
tree |= comment.to_string().contains("+tree");
expect_errors |= comment.to_string().contains("+errors");
- show_token_ids |= comment.to_string().contains("+tokenids");
+ show_spans |= comment.to_string().contains("+spans");
+ show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let mut expn_text = String::new();
@@ -164,13 +125,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
} else {
assert!(
parse.errors().is_empty(),
- "parse errors in expansion: \n{:#?}",
- parse.errors()
+ "parse errors in expansion: \n{:#?}\n```\n{}\n```",
+ parse.errors(),
+ parse.syntax_node(),
);
}
let pp = pretty_print_macro_expansion(
parse.syntax_node(),
- show_token_ids.then_some(&*token_map),
+ SpanMapRef::ExpansionSpanMap(&token_map),
+ show_spans,
+ show_ctxt,
);
let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp);
@@ -185,27 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
let range = call.syntax().text_range();
let range: Range<usize> = range.into();
-
- if show_token_ids {
- if let Some((tree, map, _)) = arg.value.as_deref() {
- let tt_range = call.token_tree().unwrap().syntax().text_range();
- let mut ranges = Vec::new();
- extract_id_ranges(&mut ranges, map, tree);
- for (range, id) in ranges {
- let idx = (tt_range.start() + range.end()).into();
- text_edits.push((idx..idx, format!("#{}", id.0)));
- }
- }
- text_edits.push((range.start..range.start, "// ".into()));
- call.to_string().match_indices('\n').for_each(|(offset, _)| {
- let offset = offset + 1 + range.start;
- text_edits.push((offset..offset, "// ".into()));
- });
- text_edits.push((range.end..range.end, "\n".into()));
- text_edits.push((range.end..range.end, expn_text));
- } else {
- text_edits.push((range, expn_text));
- }
+ text_edits.push((range, expn_text));
}
text_edits.sort_by_key(|(range, _)| range.start);
@@ -226,19 +170,43 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
_ => None,
};
+
if let Some(src) = src {
- if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
- let pp = pretty_print_macro_expansion(src.value, None);
- format_to!(expanded_text, "\n{}", pp)
+ if let Some(file_id) = src.file_id.macro_file() {
+ if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) {
+ let call = file_id.call_node(&db);
+ let mut show_spans = false;
+ let mut show_ctxt = false;
+ for comment in
+ call.value.children_with_tokens().filter(|it| it.kind() == COMMENT)
+ {
+ show_spans |= comment.to_string().contains("+spans");
+ show_ctxt |= comment.to_string().contains("+syntaxctxt");
+ }
+ let pp = pretty_print_macro_expansion(
+ src.value,
+ db.span_map(src.file_id).as_ref(),
+ show_spans,
+ show_ctxt,
+ );
+ format_to!(expanded_text, "\n{}", pp)
+ }
}
}
}
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
- if src.file_id.is_builtin_derive(&db) {
- let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
- format_to!(expanded_text, "\n{}", pp)
+ if let Some(macro_file) = src.file_id.macro_file() {
+ if macro_file.is_builtin_derive(&db) {
+ let pp = pretty_print_macro_expansion(
+ src.value.syntax().clone(),
+ db.span_map(macro_file.into()).as_ref(),
+ false,
+ false,
+ );
+ format_to!(expanded_text, "\n{}", pp)
+ }
}
}
@@ -246,20 +214,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
expect.assert_eq(&expanded_text);
}
-fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
- tree.token_trees.iter().for_each(|tree| match tree {
- tt::TokenTree::Leaf(leaf) => {
- let id = match leaf {
- tt::Leaf::Literal(it) => it.span,
- tt::Leaf::Punct(it) => it.span,
- tt::Leaf::Ident(it) => it.span,
- };
- ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
- }
- tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
- });
-}
-
fn reindent(indent: IndentLevel, pp: String) -> String {
if !pp.contains('\n') {
return pp;
@@ -276,7 +230,12 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
res
}
-fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
+fn pretty_print_macro_expansion(
+ expn: SyntaxNode,
+ map: SpanMapRef<'_>,
+ show_spans: bool,
+ show_ctxt: bool,
+) -> String {
let mut res = String::new();
let mut prev_kind = EOF;
let mut indent_level = 0;
@@ -322,10 +281,22 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str
}
prev_kind = curr_kind;
format_to!(res, "{}", token);
- if let Some(map) = map {
- if let Some(id) = map.token_by_range(token.text_range()) {
- format_to!(res, "#{}", id.0);
+ if show_spans || show_ctxt {
+ let span = map.span_for_range(token.text_range());
+ format_to!(res, "#");
+ if show_spans {
+ format_to!(
+ res,
+ "{:?}:{:?}@{:?}",
+ span.anchor.file_id,
+ span.anchor.ast_id.into_raw(),
+ span.range,
+ );
+ }
+ if show_ctxt {
+ format_to!(res, "\\{}", span.ctx);
}
+ format_to!(res, "#");
}
}
res
@@ -342,6 +313,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
subtree: &Subtree,
_: Option<&Subtree>,
_: &base_db::Env,
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
) -> Result<Subtree, base_db::ProcMacroExpansionError> {
let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 822bdcc12..060b8aa8c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -94,6 +94,41 @@ fn foo() {
}
#[test]
+fn macro_rules_in_attr() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
+ check(
+ r#"
+//- proc_macros: identity
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+id! {
+ #[proc_macros::identity]
+ impl Foo for WrapBj {
+ async fn foo(&self) {
+ self.id().await;
+ }
+ }
+}
+"#,
+ expect![[r#"
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+#[proc_macros::identity] impl Foo for WrapBj {
+ async fn foo(&self ) {
+ self .id().await ;
+ }
+}
+"#]],
+ );
+}
+
+#[test]
fn float_parsing_panic() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
check(
@@ -127,3 +162,27 @@ macro_rules! id {
"#]],
);
}
+
+#[test]
+fn float_attribute_mapping() {
+ check(
+ r#"
+//- proc_macros: identity
+//+spans+syntaxctxt
+#[proc_macros::identity]
+fn foo(&self) {
+ self.0. 1;
+}
+"#,
+ expect![[r#"
+//+spans+syntaxctxt
+#[proc_macros::identity]
+fn foo(&self) {
+ self.0. 1;
+}
+
+fn#FileId(0):1@45..47\0# foo#FileId(0):1@48..51\0#(#FileId(0):1@51..52\0#&#FileId(0):1@52..53\0#self#FileId(0):1@53..57\0# )#FileId(0):1@57..58\0# {#FileId(0):1@59..60\0#
+ self#FileId(0):1@65..69\0# .#FileId(0):1@69..70\0#0#FileId(0):1@70..71\0#.#FileId(0):1@71..72\0#1#FileId(0):1@73..74\0#;#FileId(0):1@74..75\0#
+}#FileId(0):1@76..77\0#"#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index 2d4586146..b3a10a386 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -5,7 +5,7 @@
use std::{cmp::Ordering, iter, mem};
-use base_db::{CrateId, Dependency, Edition, FileId};
+use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
@@ -14,7 +14,6 @@ use hir_expand::{
builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro,
- hygiene::Hygiene,
name::{name, AsName, Name},
proc_macro::ProcMacroExpander,
ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc,
@@ -85,8 +84,17 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
.enumerate()
.map(|(idx, it)| {
// FIXME: a hacky way to create a Name from string.
- let name =
- tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
+ let name = tt::Ident {
+ text: it.name.clone(),
+ span: tt::SpanData {
+ range: syntax::TextRange::empty(syntax::TextSize::new(0)),
+ anchor: base_db::span::SpanAnchor {
+ file_id: FileId::BOGUS,
+ ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: SyntaxContextId::ROOT,
+ },
+ };
(name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
})
.collect())
@@ -112,7 +120,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro,
- hygienes: FxHashMap::default(),
};
if tree_id.is_block() {
collector.seed_with_inner(tree_id);
@@ -212,9 +219,23 @@ struct MacroDirective {
#[derive(Clone, Debug, Eq, PartialEq)]
enum MacroDirectiveKind {
- FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
- Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
- Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
+ FnLike {
+ ast_id: AstIdWithPath<ast::MacroCall>,
+ expand_to: ExpandTo,
+ call_site: SyntaxContextId,
+ },
+ Derive {
+ ast_id: AstIdWithPath<ast::Adt>,
+ derive_attr: AttrId,
+ derive_pos: usize,
+ call_site: SyntaxContextId,
+ },
+ Attr {
+ ast_id: AstIdWithPath<ast::Item>,
+ attr: Attr,
+ mod_item: ModItem,
+ /* is this needed? */ tree: TreeId,
+ },
}
/// Walks the tree of module recursively
@@ -242,12 +263,6 @@ struct DefCollector<'a> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
- /// `Hygiene` cache, because `Hygiene` construction is expensive.
- ///
- /// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction.
- /// However, `DefCollector` still needs to lower paths in attributes, in particular those in
- /// derive meta item list.
- hygienes: FxHashMap<HirFileId, Hygiene>,
}
impl DefCollector<'_> {
@@ -315,12 +330,11 @@ impl DefCollector<'_> {
}
if *attr_name == hir_expand::name![feature] {
- let hygiene = &Hygiene::new_unhygienic();
let features = attr
- .parse_path_comma_token_tree(self.db.upcast(), hygiene)
+ .parse_path_comma_token_tree(self.db.upcast())
.into_iter()
.flatten()
- .filter_map(|feat| match feat.segments() {
+ .filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()),
_ => None,
});
@@ -471,7 +485,7 @@ impl DefCollector<'_> {
directive.module_id,
MacroCallKind::Attr {
ast_id: ast_id.ast_id,
- attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
+ attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@@ -1119,10 +1133,11 @@ impl DefCollector<'_> {
let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
match &directive.kind {
- MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+ MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
let call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
+ *call_site,
*expand_to,
self.def_map.krate,
resolver_def_id,
@@ -1134,12 +1149,13 @@ impl DefCollector<'_> {
return false;
}
}
- MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
let id = derive_macro_as_call_id(
self.db,
ast_id,
*derive_attr,
*derive_pos as u32,
+ *call_site,
self.def_map.krate,
resolver,
);
@@ -1212,7 +1228,7 @@ impl DefCollector<'_> {
};
if matches!(
def,
- MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
+ MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. }
if expander.is_derive()
) {
// Resolved to `#[derive]`
@@ -1234,22 +1250,10 @@ impl DefCollector<'_> {
};
let ast_id = ast_id.with_value(ast_adt_id);
- let extend_unhygenic;
- let hygiene = if file_id.is_macro() {
- self.hygienes
- .entry(file_id)
- .or_insert_with(|| Hygiene::new(self.db.upcast(), file_id))
- } else {
- // Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry
- // when we're in an oridinary (non-macro) file.
- extend_unhygenic = Hygiene::new_unhygienic();
- &extend_unhygenic
- };
-
- match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) {
+ match attr.parse_path_comma_token_tree(self.db.upcast()) {
Some(derive_macros) => {
let mut len = 0;
- for (idx, path) in derive_macros.enumerate() {
+ for (idx, (path, call_site)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id,
@@ -1258,6 +1262,7 @@ impl DefCollector<'_> {
ast_id,
derive_attr: attr.id,
derive_pos: idx,
+ call_site,
},
container: directive.container,
});
@@ -1414,11 +1419,12 @@ impl DefCollector<'_> {
for directive in &self.unresolved_macros {
match &directive.kind {
- MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+ MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
let macro_call_as_call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
+ *call_site,
*expand_to,
self.def_map.krate,
|path| {
@@ -1444,7 +1450,7 @@ impl DefCollector<'_> {
));
}
}
- MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Derive {
@@ -1823,9 +1829,8 @@ impl ModCollector<'_, '_> {
cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
let mut single_imports = Vec::new();
- let hygiene = Hygiene::new_unhygienic();
for attr in macro_use_attrs {
- let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else {
+ let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else {
// `#[macro_use]` (without any paths) found, forget collected names and just import
// all visible macros.
self.def_collector.import_macros_from_extern_crate(
@@ -1835,7 +1840,7 @@ impl ModCollector<'_, '_> {
);
return;
};
- for path in paths {
+ for (path, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
@@ -2083,8 +2088,18 @@ impl ModCollector<'_, '_> {
let name = match attrs.by_key("rustc_builtin_macro").string_value() {
Some(it) => {
// FIXME: a hacky way to create a Name from string.
- name =
- tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name();
+ name = tt::Ident {
+ text: it.clone(),
+ span: tt::SpanData {
+ range: syntax::TextRange::empty(syntax::TextSize::new(0)),
+ anchor: base_db::span::SpanAnchor {
+ file_id: FileId::BOGUS,
+ ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .as_name();
&name
}
None => {
@@ -2210,8 +2225,12 @@ impl ModCollector<'_, '_> {
}
}
- fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
- let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));
+ fn collect_macro_call(
+ &mut self,
+ &MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
+ container: ItemContainerId,
+ ) {
+ let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path));
let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@@ -2222,7 +2241,8 @@ impl ModCollector<'_, '_> {
if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
&ast_id,
- mac.expand_to,
+ call_site,
+ expand_to,
self.def_collector.def_map.krate,
|path| {
path.as_ident().and_then(|name| {
@@ -2276,7 +2296,7 @@ impl ModCollector<'_, '_> {
self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id,
depth: self.macro_depth + 1,
- kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
+ kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site },
container,
});
}
@@ -2363,7 +2383,6 @@ mod tests {
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro: false,
- hygienes: FxHashMap::default(),
};
collector.seed_with_top_level();
collector.collect();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
index 2dcc2c30f..c45200e2d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,7 +1,7 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId};
-use hir_expand::name::Name;
+use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt};
use limit::Limit;
use syntax::SmolStr;
@@ -66,14 +66,14 @@ impl ModDir {
attr_path: Option<&SmolStr>,
) -> Result<(FileId, bool, ModDir), Box<[String]>> {
let name = name.unescaped();
- let orig_file_id = file_id.original_file(db.upcast());
+ let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
let mut candidate_files = ArrayVec::<_, 2>::new();
match attr_path {
Some(attr_path) => {
candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
}
- None if file_id.is_include_macro(db.upcast()) => {
+ None if file_id.macro_file().map_or(false, |it| it.is_include_macro(db.upcast())) => {
candidate_files.push(format!("{}.rs", name.display(db.upcast())));
candidate_files.push(format!("{}/mod.rs", name.display(db.upcast())));
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
index 460a908b6..be3438e42 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
@@ -96,8 +96,8 @@ impl DefMap {
let types = result.take_types()?;
match types {
ModuleDefId::ModuleId(m) => Visibility::Module(m),
+ // error: visibility needs to refer to module
_ => {
- // error: visibility needs to refer to module
return None;
}
}
@@ -183,15 +183,6 @@ impl DefMap {
shadow: BuiltinShadowMode,
expected_macro_subns: Option<MacroSubNs>,
) -> ResolvePathResult {
- let graph = db.crate_graph();
- let _cx = stdx::panic_context::enter(format!(
- "DefMap {:?} crate_name={:?} block={:?} path={}",
- self.krate,
- graph[self.krate].display_name,
- self.block,
- path.display(db.upcast())
- ));
-
let mut segments = path.segments().iter().enumerate();
let mut curr_per_ns = match path.kind {
PathKind::DollarCrate(krate) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
index e7cc44b04..b2ffbbe4c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
@@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::{expect, Expect};
use triomphe::Arc;
-use crate::{db::DefDatabase, test_db::TestDB};
-
-use super::DefMap;
+use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
let db = TestDB::with_files(ra_fixture);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
index 4a86f88e5..78cb78e83 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -1,13 +1,19 @@
-use base_db::SourceDatabaseExt;
+use base_db::{SourceDatabase, SourceDatabaseExt};
use triomphe::Arc;
-use crate::{db::DefDatabase, AdtId, ModuleDefId};
-
-use super::*;
+use crate::{
+ db::DefDatabase,
+ nameres::tests::{TestDB, WithFixture},
+ AdtId, ModuleDefId,
+};
fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
- let krate = db.test_crate();
+ let krate = {
+ let crate_graph = db.crate_graph();
+ // Some of these tests use minicore/proc-macros which will be injected as the first crate
+ crate_graph.iter().last().unwrap()
+ };
{
let events = db.log_executed(|| {
db.crate_def_map(krate);
@@ -28,84 +34,199 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
fn typing_inside_a_function_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
- //- /lib.rs
- mod foo;$0
+//- /lib.rs
+mod foo;$0
- use crate::foo::bar::Baz;
+use crate::foo::bar::Baz;
- enum E { A, B }
- use E::*;
+enum E { A, B }
+use E::*;
- fn foo() -> i32 {
- 1 + 1
- }
+fn foo() -> i32 {
+ 1 + 1
+}
- #[cfg(never)]
- fn no() {}
- //- /foo/mod.rs
- pub mod bar;
+#[cfg(never)]
+fn no() {}
+//- /foo/mod.rs
+pub mod bar;
- //- /foo/bar.rs
- pub struct Baz;
- ",
+//- /foo/bar.rs
+pub struct Baz;
+",
r"
- mod foo;
+mod foo;
- use crate::foo::bar::Baz;
+use crate::foo::bar::Baz;
- enum E { A, B }
- use E::*;
+enum E { A, B }
+use E::*;
- fn foo() -> i32 { 92 }
+fn foo() -> i32 { 92 }
- #[cfg(never)]
- fn no() {}
- ",
+#[cfg(never)]
+fn no() {}
+",
);
}
#[test]
fn typing_inside_a_macro_should_not_invalidate_def_map() {
- let (mut db, pos) = TestDB::with_position(
+ check_def_map_is_not_recomputed(
r"
- //- /lib.rs
- macro_rules! m {
- ($ident:ident) => {
- fn f() {
- $ident + $ident;
- };
- }
- }
- mod foo;
+//- /lib.rs
+macro_rules! m {
+ ($ident:ident) => {
+ fn f() {
+ $ident + $ident;
+ };
+ }
+}
+mod foo;
- //- /foo/mod.rs
- pub mod bar;
+//- /foo/mod.rs
+pub mod bar;
- //- /foo/bar.rs
- $0
- m!(X);
- ",
+//- /foo/bar.rs
+$0
+m!(X);
+
+pub struct S {}
+",
+ r"
+m!(Y);
+
+pub struct S {}
+",
);
- let krate = db.test_crate();
- {
- let events = db.log_executed(|| {
- let crate_def_map = db.crate_def_map(krate);
- let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
- assert_eq!(module_data.scope.resolutions().count(), 1);
- });
- assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
+}
+
+#[test]
+fn typing_inside_an_attribute_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+//- proc_macros: identity
+//- /lib.rs
+mod foo;
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+$0
+#[proc_macros::identity]
+fn f() {}
+",
+ r"
+#[proc_macros::identity]
+fn f() { foo }
+",
+ );
+}
+
+#[test]
+fn typing_inside_an_attribute_arg_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+//- proc_macros: identity
+//- /lib.rs
+mod foo;
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+$0
+#[proc_macros::identity]
+fn f() {}
+",
+ r"
+#[proc_macros::identity(foo)]
+fn f() {}
+",
+ );
+}
+#[test]
+fn typing_inside_macro_heavy_file_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+//- proc_macros: identity, derive_identity
+//- /lib.rs
+macro_rules! m {
+ ($ident:ident) => {
+ fn fm() {
+ $ident + $ident;
+ };
}
- db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
+}
+mod foo;
- {
- let events = db.log_executed(|| {
- let crate_def_map = db.crate_def_map(krate);
- let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
- assert_eq!(module_data.scope.resolutions().count(), 1);
- });
- assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+$0
+fn f() {}
+
+m!(X);
+macro_rules! m2 {
+ ($ident:ident) => {
+ fn f2() {
+ $ident + $ident;
+ };
}
}
+m2!(X);
+
+#[proc_macros::identity]
+#[derive(proc_macros::DeriveIdentity)]
+pub struct S {}
+",
+ r"
+fn f() {0}
+
+m!(X);
+macro_rules! m2 {
+ ($ident:ident) => {
+ fn f2() {
+ $ident + $ident;
+ };
+ }
+}
+m2!(X);
+
+#[proc_macros::identity]
+#[derive(proc_macros::DeriveIdentity)]
+pub struct S {}
+",
+ );
+}
+
+#[test]
+fn typing_inside_a_derive_should_not_invalidate_def_map() {
+ check_def_map_is_not_recomputed(
+ r"
+//- proc_macros: derive_identity
+//- minicore:derive
+//- /lib.rs
+mod foo;
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+$0
+#[derive(proc_macros::DeriveIdentity)]
+#[allow()]
+struct S;
+",
+ r"
+#[derive(proc_macros::DeriveIdentity)]
+#[allow(dead_code)]
+struct S;
+",
+ );
+}
#[test]
fn typing_inside_a_function_should_not_invalidate_item_expansions() {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
index 3894172a5..215c49d4c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
@@ -96,8 +96,8 @@ pub enum GenericArg {
impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
- pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
- lower::lower_path(path, ctx)
+ pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option<Path> {
+ lower::lower_path(ctx, path)
}
/// Converts a known mod path to `Path`.
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
index abd817893..39f1b6f1c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -4,8 +4,10 @@ use std::iter;
use crate::{lower::LowerCtx, type_ref::ConstRef};
-use either::Either;
-use hir_expand::name::{name, AsName};
+use hir_expand::{
+ mod_path::resolve_crate_root,
+ name::{name, AsName},
+};
use intern::Interned;
use syntax::ast::{self, AstNode, HasTypeBounds};
@@ -16,12 +18,12 @@ use crate::{
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
-pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
+pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut type_anchor = None;
let mut segments = Vec::new();
let mut generic_args = Vec::new();
- let hygiene = ctx.hygiene();
+ let span_map = ctx.span_map();
loop {
let segment = path.segment()?;
@@ -31,31 +33,31 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
- // FIXME: this should just return name
- match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) {
- Either::Left(name) => {
- let args = segment
- .generic_arg_list()
- .and_then(|it| lower_generic_args(ctx, it))
- .or_else(|| {
- lower_generic_args_from_fn_path(
- ctx,
- segment.param_list(),
- segment.ret_type(),
- )
- })
- .map(Interned::new);
- if let Some(_) = args {
- generic_args.resize(segments.len(), None);
- generic_args.push(args);
- }
- segments.push(name);
- }
- Either::Right(crate_id) => {
- kind = PathKind::DollarCrate(crate_id);
- break;
- }
+ if name_ref.text() == "$crate" {
+ break kind = resolve_crate_root(
+ ctx.db.upcast(),
+ span_map.span_for_range(name_ref.syntax().text_range()).ctx,
+ )
+ .map(PathKind::DollarCrate)
+ .unwrap_or(PathKind::Crate);
+ }
+ let name = name_ref.as_name();
+ let args = segment
+ .generic_arg_list()
+ .and_then(|it| lower_generic_args(ctx, it))
+ .or_else(|| {
+ lower_generic_args_from_fn_path(
+ ctx,
+ segment.param_list(),
+ segment.ret_type(),
+ )
+ })
+ .map(Interned::new);
+ if let Some(_) = args {
+ generic_args.resize(segments.len(), None);
+ generic_args.push(args);
}
+ segments.push(name);
}
ast::PathSegmentKind::SelfTypeKw => {
segments.push(name![Self]);
@@ -74,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
- Path::from_src(trait_ref.path()?, ctx)?
+ Path::from_src(ctx, trait_ref.path()?)?
else {
return None;
};
@@ -151,8 +153,14 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// We follow what it did anyway :)
if segments.len() == 1 && kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
- if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) {
- kind = PathKind::DollarCrate(crate_id);
+ let syn_ctxt = span_map.span_for_range(path.segment()?.syntax().text_range()).ctx;
+ if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
+ if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+ kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
+ Some(crate_root) => PathKind::DollarCrate(crate_root),
+ None => PathKind::Crate,
+ }
+ }
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index 50da9ed06..2ac1516ec 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -588,6 +588,24 @@ impl Resolver {
_ => None,
})
}
+
+ pub fn type_owner(&self) -> Option<TypeOwnerId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::BlockScope(_) => None,
+ &Scope::GenericParams { def, .. } => Some(def.into()),
+ &Scope::ImplDefScope(id) => Some(id.into()),
+ &Scope::AdtScope(adt) => Some(adt.into()),
+ Scope::ExprScope(it) => Some(it.owner.into()),
+ })
+ }
+
+ pub fn impl_def(&self) -> Option<ImplId> {
+ self.scopes().find_map(|scope| match scope {
+ Scope::ImplDefScope(def) => Some(*def),
+ _ => None,
+ })
+ }
+
/// `expr_id` is required to be an expression id that comes after the top level expression scope in the given resolver
#[must_use]
pub fn update_to_inner_scope(
@@ -1071,7 +1089,6 @@ impl HasResolver for TypeOwnerId {
TypeOwnerId::TypeAliasId(it) => it.resolver(db),
TypeOwnerId::ImplId(it) => it.resolver(db),
TypeOwnerId::EnumVariantId(it) => it.resolver(db),
- TypeOwnerId::ModuleId(it) => it.resolver(db),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
index a6befc8a8..f4a6b61f7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -34,6 +34,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
index 30f48de61..f5803653c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/visibility.rs
@@ -2,7 +2,7 @@
use std::iter;
-use hir_expand::{hygiene::Hygiene, InFile};
+use hir_expand::{span::SpanMapRef, InFile};
use la_arena::ArenaMap;
use syntax::ast;
use triomphe::Arc;
@@ -34,22 +34,22 @@ impl RawVisibility {
db: &dyn DefDatabase,
node: InFile<Option<ast::Visibility>>,
) -> RawVisibility {
- Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
+ Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref())
}
- pub(crate) fn from_ast_with_hygiene(
+ pub(crate) fn from_ast_with_span_map(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> RawVisibility {
- Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
+ Self::from_ast_with_span_map_and_default(db, node, RawVisibility::private(), span_map)
}
- pub(crate) fn from_ast_with_hygiene_and_default(
+ pub(crate) fn from_ast_with_span_map_and_default(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
default: RawVisibility,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> RawVisibility {
let node = match node {
None => return default,
@@ -57,7 +57,7 @@ impl RawVisibility {
};
match node.kind() {
ast::VisibilityKind::In(path) => {
- let path = ModPath::from_src(db.upcast(), path, hygiene);
+ let path = ModPath::from_src(db.upcast(), path, span_map);
let path = match path {
None => return RawVisibility::private(),
Some(path) => path,
@@ -73,7 +73,7 @@ impl RawVisibility {
RawVisibility::Module(path)
}
ast::VisibilityKind::PubSelf => {
- let path = ModPath::from_kind(PathKind::Plain);
+ let path = ModPath::from_kind(PathKind::Super(0));
RawVisibility::Module(path)
}
ast::VisibilityKind::Pub => RawVisibility::Public,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 1f27204c1..361bbec43 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -13,11 +13,11 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-tracing = "0.1.35"
-either = "1.7.0"
+tracing.workspace = true
+either.workspace = true
rustc-hash = "1.1.0"
la-arena.workspace = true
-itertools = "0.10.5"
+itertools.workspace = true
hashbrown.workspace = true
smallvec.workspace = true
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
index 1906ed15b..be0b72f9d 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
@@ -12,11 +12,40 @@ use std::{
marker::PhantomData,
};
-use la_arena::{Arena, Idx};
+use la_arena::{Arena, Idx, RawIdx};
use profile::Count;
use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+use crate::db;
+
+pub use base_db::span::ErasedFileAstId;
+
+/// `AstId` points to an AST node in any file.
+///
+/// It is stable across reparses, and can be used as salsa key/value.
+pub type AstId<N> = crate::InFile<FileAstId<N>>;
+
+impl<N: AstIdNode> AstId<N> {
+ pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
+ self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+ }
+ pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> {
+ crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
+ }
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
+ db.ast_id_map(self.file_id).get(self.value)
+ }
+}
+
+pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
+
+impl ErasedAstId {
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
+ db.ast_id_map(self.file_id).get_erased(self.value)
+ }
+}
+
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
@@ -62,8 +91,6 @@ impl<N: AstIdNode> FileAstId<N> {
}
}
-pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
-
pub trait AstIdNode: AstNode {}
macro_rules! register_ast_id_node {
(impl AstIdNode for $($ident:ident),+ ) => {
@@ -99,7 +126,7 @@ register_ast_id_node! {
TraitAlias,
TypeAlias,
Use,
- AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg
+ AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg, Param, SelfParam
}
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
@@ -129,6 +156,11 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap::default();
+
+ // make sure to allocate the root node
+ if !should_alloc_id(node.kind()) {
+ res.alloc(node);
+ }
// By walking the tree in breadth-first order we make sure that parents
// get lower ids then children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a
@@ -136,9 +168,9 @@ impl AstIdMap {
bdfs(node, |it| {
if should_alloc_id(it.kind()) {
res.alloc(&it);
- true
+ TreeOrder::BreadthFirst
} else {
- false
+ TreeOrder::DepthFirst
}
});
res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@@ -155,6 +187,11 @@ impl AstIdMap {
res
}
+ /// The [`AstId`] of the root node
+ pub fn root(&self) -> SyntaxNodePtr {
+ self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
+ }
+
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
@@ -164,7 +201,7 @@ impl AstIdMap {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
- pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+ pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone()
}
@@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
hasher.finish()
}
+#[derive(Copy, Clone, PartialEq, Eq)]
+enum TreeOrder {
+ BreadthFirst,
+ DepthFirst,
+}
+
/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
/// order? It is a mix of breadth-first and depth first orders. Nodes for which
-/// `f` returns true are visited breadth-first, all the other nodes are explored
-/// depth-first.
+/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
+/// [`TreeOrder::DepthFirst`].
///
/// In other words, the size of the bfs queue is bound by the number of "true"
/// nodes.
-fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
+fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
@@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
while let Some(event) = preorder.next() {
match event {
syntax::WalkEvent::Enter(node) => {
- if f(node.clone()) {
+ if f(node.clone()) == TreeOrder::BreadthFirst {
next_layer.extend(node.children());
preorder.skip_subtree();
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
index 0ec2422b3..b8fc30c91 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -1,19 +1,19 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{fmt, ops};
-use base_db::CrateId;
+use base_db::{span::SyntaxContextId, CrateId};
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
-use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
+use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc;
use crate::{
db::ExpandDatabase,
- hygiene::Hygiene,
mod_path::ModPath,
+ span::SpanMapRef,
tt::{self, Subtree},
InFile,
};
@@ -39,28 +39,33 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
- pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
- let entries = collect_attrs(owner)
- .filter_map(|(id, attr)| match attr {
- Either::Left(attr) => {
- attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
- }
- Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
- id,
- input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
- path: Interned::new(ModPath::from(crate::name!(doc))),
- }),
- })
- .collect::<Vec<_>>();
- // FIXME: use `Arc::from_iter` when it becomes available
- let entries: Arc<[Attr]> = Arc::from(entries);
+ pub fn new(
+ db: &dyn ExpandDatabase,
+ owner: &dyn ast::HasAttrs,
+ span_map: SpanMapRef<'_>,
+ ) -> Self {
+ let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr {
+ Either::Left(attr) => {
+ attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+ }
+ Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+ id,
+ input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
+ path: Interned::new(ModPath::from(crate::name!(doc))),
+ ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
+ }),
+ });
+ let entries: Arc<[Attr]> = Arc::from_iter(entries);
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
- pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
- let hygiene = Hygiene::new(db, owner.file_id);
- Self::new(db, owner.value, &hygiene)
+ pub fn from_attrs_owner(
+ db: &dyn ExpandDatabase,
+ owner: InFile<&dyn ast::HasAttrs>,
+ span_map: SpanMapRef<'_>,
+ ) -> Self {
+ Self::new(db, owner.value, span_map)
}
pub fn merge(&self, other: Self) -> Self {
@@ -71,19 +76,13 @@ impl RawAttrs {
(Some(a), Some(b)) => {
let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
Self {
- entries: Some(Arc::from(
- a.iter()
- .cloned()
- .chain(b.iter().map(|it| {
- let mut it = it.clone();
- it.id.id = it.id.ast_index() as u32 + last_ast_index
- | (it.id.cfg_attr_index().unwrap_or(0) as u32)
- << AttrId::AST_INDEX_BITS;
- it
- }))
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- )),
+ entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
+ let mut it = it.clone();
+ it.id.id = it.id.ast_index() as u32 + last_ast_index
+ | (it.id.cfg_attr_index().unwrap_or(0) as u32)
+ << AttrId::AST_INDEX_BITS;
+ it
+ })))),
}
}
}
@@ -100,51 +99,43 @@ impl RawAttrs {
}
let crate_graph = db.crate_graph();
- let new_attrs = Arc::from(
- self.iter()
- .flat_map(|attr| -> SmallVec<[_; 1]> {
- let is_cfg_attr =
- attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
- if !is_cfg_attr {
- return smallvec![attr.clone()];
- }
-
- let subtree = match attr.token_tree_value() {
- Some(it) => it,
- _ => return smallvec![attr.clone()],
- };
+ let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> {
+ let is_cfg_attr =
+ attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
+ if !is_cfg_attr {
+ return smallvec![attr.clone()];
+ }
- let (cfg, parts) = match parse_cfg_attr_input(subtree) {
- Some(it) => it,
- None => return smallvec![attr.clone()],
+ let subtree = match attr.token_tree_value() {
+ Some(it) => it,
+ _ => return smallvec![attr.clone()],
+ };
+
+ let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+ Some(it) => it,
+ None => return smallvec![attr.clone()],
+ };
+ let index = attr.id;
+ let attrs =
+ parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
+ let tree = Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: attr.to_vec(),
};
- let index = attr.id;
- let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
- |(idx, attr)| {
- let tree = Subtree {
- delimiter: tt::Delimiter::unspecified(),
- token_trees: attr.to_vec(),
- };
- // FIXME hygiene
- let hygiene = Hygiene::new_unhygienic();
- Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
- },
- );
-
- let cfg_options = &crate_graph[krate].cfg_options;
- let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
- let cfg = CfgExpr::parse(&cfg);
- if cfg_options.check(&cfg) == Some(false) {
- smallvec![]
- } else {
- cov_mark::hit!(cfg_attr_active);
-
- attrs.collect()
- }
- })
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- );
+ Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
+ });
+
+ let cfg_options = &crate_graph[krate].cfg_options;
+ let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+ let cfg = CfgExpr::parse(&cfg);
+ if cfg_options.check(&cfg) == Some(false) {
+ smallvec![]
+ } else {
+ cov_mark::hit!(cfg_attr_active);
+
+ attrs.collect()
+ }
+ }));
RawAttrs { entries: Some(new_attrs) }
}
@@ -185,21 +176,23 @@ pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>,
+ pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput {
/// `#[attr = "string"]`
+ // FIXME: This is losing span
Literal(SmolStr),
/// `#[attr(subtree)]`
- TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
+ TokenTree(Box<tt::Subtree>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
- AttrInput::TokenTree(tt) => tt.0.fmt(f),
+ AttrInput::TokenTree(tt) => tt.fmt(f),
}
}
}
@@ -208,10 +201,10 @@ impl Attr {
fn from_src(
db: &dyn ExpandDatabase,
ast: ast::Meta,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
id: AttrId,
) -> Option<Attr> {
- let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
+ let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(),
@@ -219,24 +212,20 @@ impl Attr {
};
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
- let (tree, map) = syntax_node_to_token_tree(tt.syntax());
- Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
+ let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
+ Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
};
- Some(Attr { id, path, input })
+ Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
}
- fn from_tt(
- db: &dyn ExpandDatabase,
- tt: &tt::Subtree,
- hygiene: &Hygiene,
- id: AttrId,
- ) -> Option<Attr> {
- let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+ fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
+ // FIXME: Unecessary roundtrip tt -> ast -> tt
+ let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
- Self::from_src(db, ast, hygiene, id)
+ Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
}
pub fn path(&self) -> &ModPath {
@@ -256,7 +245,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
- AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
+ AttrInput::TokenTree(tt) => match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@@ -267,7 +256,7 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
- AttrInput::TokenTree(tt) => Some(&tt.0),
+ AttrInput::TokenTree(tt) => Some(tt),
_ => None,
}
}
@@ -276,8 +265,7 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
- hygiene: &'a Hygiene,
- ) -> Option<impl Iterator<Item = ModPath> + 'a> {
+ ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis {
@@ -290,12 +278,13 @@ impl Attr {
if tts.is_empty() {
return None;
}
- // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
+ // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
+ // here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
- token_trees: tts.into_iter().cloned().collect(),
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: tts.to_vec(),
};
- let (parse, _) =
+ let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed.
@@ -304,7 +293,11 @@ impl Attr {
return None;
}
let path = meta.path()?;
- ModPath::from_src(db, path, hygiene)
+ let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
+ Some((
+ ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
+ call_site,
+ ))
});
Some(paths)
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
index 4ee12e2f2..de58a495f 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -1,16 +1,22 @@
//! Builtin attributes.
+use base_db::{
+ span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+};
+use syntax::{TextRange, TextSize};
+
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
- ( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
+ ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinAttrExpander {
$($variant),*
}
impl BuiltinAttrExpander {
- pub fn expand(
+ pub fn $expand_fn(
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
@@ -45,7 +51,7 @@ impl BuiltinAttrExpander {
}
}
-register_builtin! {
+register_builtin! { expand:
(bench, Bench) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
@@ -77,9 +83,8 @@ fn dummy_attr_expand(
///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ```
-/// #[Foo]
-/// #[bar::Bar]
-/// ();
+/// #![Foo]
+/// #![bar::Bar]
/// ```
/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
/// Since we do not expand the attribute in nameres though, we keep the original item.
@@ -98,21 +103,31 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
- _ => return ExpandResult::ok(tt::Subtree::empty()),
+ MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
+ attr_args
+ }
+ _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
};
- pseudo_derive_attr_expansion(tt, derives)
+ pseudo_derive_attr_expansion(tt, derives, loc.call_site)
}
pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree,
args: &tt::Subtree,
+ call_site: SyntaxContextId,
) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char,
spacing: tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
+ span: tt::SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: base_db::span::SpanAnchor {
+ file_id: FileId::BOGUS,
+ ast_id: ROOT_ERASED_FILE_AST_ID,
+ },
+ ctx: call_site,
+ },
}))
};
@@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion(
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{
token_trees.push(mk_leaf('#'));
+ token_trees.push(mk_leaf('!'));
token_trees.push(mk_leaf('['));
token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']'));
}
- token_trees.push(mk_leaf('('));
- token_trees.push(mk_leaf(')'));
- token_trees.push(mk_leaf(';'));
ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
index ecc8b407a..410aa4d28 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -1,16 +1,16 @@
//! Builtin derives.
-use ::tt::Ident;
-use base_db::{CrateOrigin, LangCrateOrigin};
+use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin};
use itertools::izip;
-use mbe::TokenMap;
use rustc_hash::FxHashSet;
use stdx::never;
use tracing::debug;
use crate::{
+ hygiene::span_with_def_site_ctxt,
name::{AsName, Name},
- tt::{self, TokenId},
+ span::SpanMapRef,
+ tt,
};
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
@@ -29,12 +29,15 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
- token_map: &TokenMap,
+ token_map: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
};
- expander(db, id, tt, token_map)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, span, tt, token_map)
}
fn find_by_name(name: &name::Name) -> Option<Self> {
@@ -70,19 +73,19 @@ enum VariantShape {
Unit,
}
-fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
- (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
+fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> {
+ (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
}
impl VariantShape {
- fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
- self.as_pattern_map(path, |it| quote!(#it))
+ fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree {
+ self.as_pattern_map(path, span, |it| quote!(span => #it))
}
- fn field_names(&self) -> Vec<tt::Ident> {
+ fn field_names(&self, span: SpanData) -> Vec<tt::Ident> {
match self {
VariantShape::Struct(s) => s.clone(),
- VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(),
+ VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
VariantShape::Unit => vec![],
}
}
@@ -90,26 +93,27 @@ impl VariantShape {
fn as_pattern_map(
&self,
path: tt::Subtree,
+ span: SpanData,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
) -> tt::Subtree {
match self {
VariantShape::Struct(fields) => {
let fields = fields.iter().map(|it| {
let mapped = field_map(it);
- quote! { #it : #mapped , }
+ quote! {span => #it : #mapped , }
});
- quote! {
+ quote! {span =>
#path { ##fields }
}
}
&VariantShape::Tuple(n) => {
- let fields = tuple_field_iterator(n).map(|it| {
+ let fields = tuple_field_iterator(span, n).map(|it| {
let mapped = field_map(&it);
- quote! {
+ quote! {span =>
#mapped ,
}
});
- quote! {
+ quote! {span =>
#path ( ##fields )
}
}
@@ -117,7 +121,7 @@ impl VariantShape {
}
}
- fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
+ fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
@@ -139,17 +143,17 @@ enum AdtShape {
}
impl AdtShape {
- fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
- self.as_pattern_map(name, |it| quote!(#it))
+ fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> {
+ self.as_pattern_map(name, |it| quote!(span =>#it), span)
}
- fn field_names(&self) -> Vec<Vec<tt::Ident>> {
+ fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> {
match self {
AdtShape::Struct(s) => {
- vec![s.field_names()]
+ vec![s.field_names(span)]
}
AdtShape::Enum { variants, .. } => {
- variants.iter().map(|(_, fields)| fields.field_names()).collect()
+ variants.iter().map(|(_, fields)| fields.field_names(span)).collect()
}
AdtShape::Union => {
never!("using fields of union in derive is always wrong");
@@ -162,18 +166,21 @@ impl AdtShape {
&self,
name: &tt::Ident,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
+ span: SpanData,
) -> Vec<tt::Subtree> {
match self {
AdtShape::Struct(s) => {
- vec![s.as_pattern_map(quote! { #name }, field_map)]
+ vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
}
AdtShape::Enum { variants, .. } => variants
.iter()
- .map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map))
+ .map(|(v, fields)| {
+ fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map)
+ })
.collect(),
AdtShape::Union => {
never!("pattern matching on union is always wrong");
- vec![quote! { un }]
+ vec![quote! {span => un }]
}
}
}
@@ -189,8 +196,12 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
-fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
- let (name, generic_param_list, shape) = match &adt {
+fn parse_adt(
+ tm: SpanMapRef<'_>,
+ adt: &ast::Adt,
+ call_site: SpanData,
+) -> Result<BasicAdtInfo, ExpandError> {
+ let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
@@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
match this {
Some(it) => {
param_type_set.insert(it.as_name());
- mbe::syntax_node_to_token_tree(it.syntax()).0
+ mbe::syntax_node_to_token_tree(it.syntax(), tm)
+ }
+ None => {
+ tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
}
- None => tt::Subtree::empty(),
}
};
let bounds = match &param {
ast::TypeOrConstParam::Type(it) => {
- it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
+ it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
}
ast::TypeOrConstParam::Const(_) => None,
};
let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
- .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
- .unwrap_or_else(tt::Subtree::empty);
+ .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
+ .unwrap_or_else(|| {
+ tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
+ });
Some(ty)
} else {
None
@@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
- .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
+ .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
.collect();
- let name_token = name_to_token(&tm, name)?;
+ let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
-fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
+fn name_to_token(
+ token_map: SpanMapRef<'_>,
+ name: Option<ast::Name>,
+) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
ExpandError::other("missing name")
})?;
- let name_token_id =
- token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
- let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
+ let span = token_map.span_for_range(name.syntax().text_range());
+ let name_token = tt::Ident { span, text: name.text().into() };
Ok(name_token)
}
@@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
+ // FIXME: use
+ invoc_span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let info = match parse_adt(tm, tt) {
+ let info = match parse_adt(tm, tt, invoc_span) {
Ok(info) => info,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
+ e,
+ )
+ }
};
let trait_body = make_trait_body(&info);
let mut where_block = vec![];
@@ -349,13 +373,13 @@ fn expand_simple_derive(
let ident_ = ident.clone();
if let Some(b) = bound {
let ident = ident.clone();
- where_block.push(quote! { #ident : #b , });
+ where_block.push(quote! {invoc_span => #ident : #b , });
}
if let Some(ty) = param_ty {
- (quote! { const #ident : #ty , }, quote! { #ident_ , })
+ (quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , })
} else {
let bound = trait_path.clone();
- (quote! { #ident : #bound , }, quote! { #ident_ , })
+ (quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , })
}
})
.unzip();
@@ -363,17 +387,17 @@ fn expand_simple_derive(
where_block.extend(info.associated_types.iter().map(|it| {
let it = it.clone();
let bound = trait_path.clone();
- quote! { #it : #bound , }
+ quote! {invoc_span => #it : #bound , }
}));
let name = info.name;
- let expanded = quote! {
+ let expanded = quote! {invoc_span =>
impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
};
ExpandResult::ok(expanded)
}
-fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
+fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree {
// FIXME: make hygiene works for builtin derive macro
// such that $crate can be used here.
let cg = db.crate_graph();
@@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
cov_mark::hit!(test_copy_expand_in_core);
- quote! { crate }
+ quote! {span => crate }
} else {
- quote! { core }
+ quote! {span => core }
};
tt.token_trees[0].clone()
@@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
}
fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn clone(&self) -> Self {
#star self
}
};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn clone(&self) -> Self {
match #star self {}
}
};
}
let name = &adt.name;
- let patterns = adt.shape.as_pattern(name);
- let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
+ let patterns = adt.shape.as_pattern(span, name);
+ let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
- let fat_arrow = fat_arrow();
- quote! {
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
#pat #fat_arrow #expr,
}
});
- quote! {
+ quote! {span =>
fn clone(&self) -> Self {
match self {
##arms
@@ -451,53 +469,56 @@ fn clone_expand(
})
}
-/// This function exists since `quote! { => }` doesn't work.
-fn fat_arrow() -> ::tt::Subtree<TokenId> {
- let eq =
- tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
- quote! { #eq> }
+/// This function exists since `quote! {span => => }` doesn't work.
+fn fat_arrow(span: SpanData) -> tt::Subtree {
+ let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
+ quote! {span => #eq> }
}
-/// This function exists since `quote! { && }` doesn't work.
-fn and_and() -> ::tt::Subtree<TokenId> {
- let and =
- tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
- quote! { #and& }
+/// This function exists since `quote! {span => && }` doesn't work.
+fn and_and(span: SpanData) -> tt::Subtree {
+ let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
+ quote! {span => #and& }
}
fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
- fields
- .as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default()))
+ fields.as_pattern_map(
+ quote!(span =>#name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
+ )
}
AdtShape::Enum { default_variant, variants } => {
if let Some(d) = default_variant {
let (name, fields) = &variants[*d];
let adt_name = &adt.name;
fields.as_pattern_map(
- quote!(#adt_name :: #name),
- |_| quote!(#krate::default::Default::default()),
+ quote!(span =>#adt_name :: #name),
+ span,
+ |_| quote!(span =>#krate::default::Default::default()),
)
} else {
// FIXME: Return expand error here
- quote!()
+ quote!(span =>)
}
}
AdtShape::Union => {
// FIXME: Return expand error here
- quote!()
+ quote!(span =>)
}
};
- quote! {
+ quote! {span =>
fn default() -> Self {
#body
}
@@ -508,44 +529,41 @@ fn default_expand(
fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
let x_string = it.to_string();
- quote! {
+ quote! {span =>
.field(#x_string, & #it)
}
});
- quote! {
+ quote! {span =>
f.debug_struct(#name) ##for_fields .finish()
}
}
VariantShape::Tuple(n) => {
- let for_fields = tuple_field_iterator(*n).map(|it| {
- quote! {
+ let for_fields = tuple_field_iterator(span, *n).map(|it| {
+ quote! {span =>
.field( & #it)
}
});
- quote! {
+ quote! {span =>
f.debug_tuple(#name) ##for_fields .finish()
}
}
- VariantShape::Unit => quote! {
+ VariantShape::Unit => quote! {span =>
f.write_str(#name)
},
};
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match #star self {}
}
@@ -553,20 +571,20 @@ fn debug_expand(
}
let arms = match &adt.shape {
AdtShape::Struct(fields) => {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let name = &adt.name;
- let pat = fields.as_pattern(quote!(#name));
+ let pat = fields.as_pattern(quote!(span =>#name), span);
let expr = for_variant(name.to_string(), fields);
- vec![quote! { #pat #fat_arrow #expr }]
+ vec![quote! {span => #pat #fat_arrow #expr }]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(name, v)| {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let adt_name = &adt.name;
- let pat = v.as_pattern(quote!(#adt_name :: #name));
+ let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span);
let expr = for_variant(name.to_string(), v);
- quote! {
+ quote! {span =>
#pat #fat_arrow #expr ,
}
})
@@ -576,7 +594,7 @@ fn debug_expand(
vec![]
}
};
- quote! {
+ quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match self {
##arms
@@ -589,47 +607,46 @@ fn debug_expand(
fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote! {};
+ return quote! {span =>};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
- let star = tt::Punct {
- char: '*',
- spacing: ::tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
- };
- return quote! {
+ let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
+ return quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {}
}
};
}
- let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
- |(pat, names)| {
- let expr = {
- let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
- quote! { {
- ##it
- } }
- };
- let fat_arrow = fat_arrow();
- quote! {
- #pat #fat_arrow #expr ,
- }
- },
- );
+ let arms =
+ adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map(
+ |(pat, names)| {
+ let expr = {
+ let it =
+ names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
+ quote! {span => {
+ ##it
+ } }
+ };
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
+ #pat #fat_arrow #expr ,
+ }
+ },
+ );
let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
- quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
+ quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); }
} else {
- quote! {}
+ quote! {span =>}
};
- quote! {
+ quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
#check_discriminant
match self {
@@ -643,56 +660,58 @@ fn hash_expand(
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
}
fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
+ let krate = find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote! {};
+ return quote! {span =>};
}
let name = &adt.name;
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, names)| {
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let body = match &*names {
[] => {
- quote!(true)
+ quote!(span =>true)
}
[first, rest @ ..] => {
let rest = rest.iter().map(|it| {
- let t1 = Ident::new(format!("{}_self", it.text), it.span);
- let t2 = Ident::new(format!("{}_other", it.text), it.span);
- let and_and = and_and();
- quote!(#and_and #t1 .eq( #t2 ))
+ let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
+ let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
+ let and_and = and_and(span);
+ quote!(span =>#and_and #t1 .eq( #t2 ))
});
let first = {
- let t1 = Ident::new(format!("{}_self", first.text), first.span);
- let t2 = Ident::new(format!("{}_other", first.text), first.span);
- quote!(#t1 .eq( #t2 ))
+ let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
+ let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
+ quote!(span =>#t1 .eq( #t2 ))
};
- quote!(#first ##rest)
+ quote!(span =>#first ##rest)
}
};
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
- quote! {
+ let fat_arrow = fat_arrow(span);
+ quote! {span =>
fn eq(&self, other: &Self) -> bool {
match (self, other) {
##arms
@@ -706,35 +725,46 @@ fn partial_eq_expand(
fn self_and_other_patterns(
adt: &BasicAdtInfo,
name: &tt::Ident,
+ span: SpanData,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
- let self_patterns = adt.shape.as_pattern_map(name, |it| {
- let t = Ident::new(format!("{}_self", it.text), it.span);
- quote!(#t)
- });
- let other_patterns = adt.shape.as_pattern_map(name, |it| {
- let t = Ident::new(format!("{}_other", it.text), it.span);
- quote!(#t)
- });
+ let self_patterns = adt.shape.as_pattern_map(
+ name,
+ |it| {
+ let t = tt::Ident::new(format!("{}_self", it.text), it.span);
+ quote!(span =>#t)
+ },
+ span,
+ );
+ let other_patterns = adt.shape.as_pattern_map(
+ name,
+ |it| {
+ let t = tt::Ident::new(format!("{}_other", it.text), it.span);
+ quote!(span =>#t)
+ },
+ span,
+ );
(self_patterns, other_patterns)
}
fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
+ span: SpanData,
) -> tt::Subtree {
- let fat_arrow1 = fat_arrow();
- let fat_arrow2 = fat_arrow();
- quote! {
+ let fat_arrow1 = fat_arrow(span);
+ let fat_arrow2 = fat_arrow(span);
+ quote! {span =>
match #left.cmp(&#right) {
#krate::cmp::Ordering::Equal #fat_arrow1 {
#rest
@@ -745,34 +775,34 @@ fn ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote!();
+ return quote!(span =>);
}
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
- let mut body = quote!(#krate::cmp::Ordering::Equal);
+ let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
for f in fields.into_iter().rev() {
- let t1 = Ident::new(format!("{}_self", f.text), f.span);
- let t2 = Ident::new(format!("{}_other", f.text), f.span);
- body = compare(krate, quote!(#t1), quote!(#t2), body);
+ let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+ let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+ body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
- let fat_arrow = fat_arrow();
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ let fat_arrow = fat_arrow(span);
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
- let mut body = quote! {
+ let fat_arrow = fat_arrow(span);
+ let mut body = quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::cmp::Ordering::Equal
}
};
if matches!(&adt.shape, AdtShape::Enum { .. }) {
- let left = quote!(#krate::intrinsics::discriminant_value(self));
- let right = quote!(#krate::intrinsics::discriminant_value(other));
- body = compare(krate, left, right, body);
+ let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
+ let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
+ body = compare(krate, left, right, body, span);
}
- quote! {
+ quote! {span =>
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body
}
@@ -783,20 +813,22 @@ fn ord_expand(
fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
+ span: SpanData,
tt: &ast::Adt,
- tm: &TokenMap,
+ tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
- let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
+ let krate = &find_builtin_crate(db, id, span);
+ expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
+ span: SpanData,
) -> tt::Subtree {
- let fat_arrow1 = fat_arrow();
- let fat_arrow2 = fat_arrow();
- quote! {
+ let fat_arrow1 = fat_arrow(span);
+ let fat_arrow2 = fat_arrow(span);
+ quote! {span =>
match #left.partial_cmp(&#right) {
#krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
#rest
@@ -807,37 +839,39 @@ fn partial_ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
- return quote!();
+ return quote!(span =>);
}
- let left = quote!(#krate::intrinsics::discriminant_value(self));
- let right = quote!(#krate::intrinsics::discriminant_value(other));
+ let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
+ let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
- let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
- let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
+ let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
+ let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
- let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
+ let mut body =
+ quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
for f in fields.into_iter().rev() {
- let t1 = Ident::new(format!("{}_self", f.text), f.span);
- let t2 = Ident::new(format!("{}_other", f.text), f.span);
- body = compare(krate, quote!(#t1), quote!(#t2), body);
+ let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
+ let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
+ body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
- let fat_arrow = fat_arrow();
- quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
+ let fat_arrow = fat_arrow(span);
+ quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
- let fat_arrow = fat_arrow();
+ let fat_arrow = fat_arrow(span);
let body = compare(
krate,
left,
right,
- quote! {
+ quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
}
},
+ span,
);
- quote! {
+ quote! {span =>
fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
#body
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index 30b19b6e5..c8f04bfee 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -1,17 +1,24 @@
//! Builtin macro
-use base_db::{AnchoredPath, Edition, FileId};
+use base_db::{
+ span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ AnchoredPath, Edition, FileId,
+};
use cfg::CfgExpr;
use either::Either;
-use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
+use itertools::Itertools;
+use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::{
ast::{self, AstToken},
SmolStr,
};
use crate::{
- db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
- MacroCallLoc,
+ db::ExpandDatabase,
+ hygiene::span_with_def_site_ctxt,
+ name, quote,
+ tt::{self, DelimSpan},
+ ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@@ -36,7 +43,10 @@ macro_rules! register_builtin {
let expander = match *self {
$( BuiltinFnLikeExpander::$kind => $expand, )*
};
- expander(db, id, tt)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, tt, span)
}
}
@@ -44,13 +54,16 @@ macro_rules! register_builtin {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
- arg_id: MacroCallId,
+ id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )*
};
- expander(db, arg_id, tt)
+
+ let span = db.lookup_intern_macro_call(id).span(db);
+ let span = span_with_def_site_ctxt(db, span, id);
+ expander(db, id, tt, span)
}
}
@@ -78,7 +91,7 @@ pub fn find_builtin_macro(
register_builtin! {
LAZY:
- (column, Column) => column_expand,
+ (column, Column) => line_expand,
(file, File) => file_expand,
(line, Line) => line_expand,
(module_path, ModulePath) => module_path_expand,
@@ -109,99 +122,108 @@ register_builtin! {
(option_env, OptionEnv) => option_env_expand
}
-const DOLLAR_CRATE: tt::Ident =
- tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
+fn mk_pound(span: SpanData) -> tt::Subtree {
+ crate::quote::IntoTt::to_subtree(
+ vec![crate::tt::Leaf::Punct(crate::tt::Punct {
+ char: '#',
+ spacing: crate::tt::Spacing::Alone,
+ span: span,
+ })
+ .into()],
+ span,
+ )
+}
fn module_path_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Just return a dummy result.
- ExpandResult::ok(quote! { "module::path" })
+ ExpandResult::ok(quote! {span =>
+ "module::path"
+ })
}
fn line_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes
- let expanded = quote! {
- 0 as u32
- };
-
- ExpandResult::ok(expanded)
+ // Note that `line!` and `column!` will never be implemented properly, as they are by definition
+ // not incremental
+ ExpandResult::ok(tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+ text: "0u32".into(),
+ span,
+ }))],
+ })
}
fn log_syntax_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn trace_macros_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn stringify_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pretty = ::tt::pretty(&tt.token_trees);
- let expanded = quote! {
+ let expanded = quote! {span =>
#pretty
};
ExpandResult::ok(expanded)
}
-fn column_expand(
- _db: &dyn ExpandDatabase,
- _id: MacroCallId,
- _tt: &tt::Subtree,
-) -> ExpandResult<tt::Subtree> {
- // dummy implementation for type-checking purposes
- let expanded = quote! {
- 0 as u32
- };
-
- ExpandResult::ok(expanded)
-}
-
fn assert_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ',');
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let expanded = match &*args {
[cond, panic_args @ ..] => {
let comma = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
- span: tt::TokenId::unspecified(),
+ span,
}))],
};
let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
- quote! {{
+ quote! {span =>{
if !(#cond) {
- #DOLLAR_CRATE::panic!(##panic_args);
+ #dollar_crate::panic!(##panic_args);
}
}}
}
- [] => quote! {{}},
+ [] => quote! {span =>{}},
};
ExpandResult::ok(expanded)
@@ -211,12 +233,13 @@ fn file_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "".
let file_name = "";
- let expanded = quote! {
+ let expanded = quote! {span =>
#file_name
};
@@ -227,16 +250,18 @@ fn format_args_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- format_args_expand_general(db, id, tt, "")
+ format_args_expand_general(db, id, tt, "", span)
}
fn format_args_nl_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- format_args_expand_general(db, id, tt, "\\n")
+ format_args_expand_general(db, id, tt, "\\n", span)
}
fn format_args_expand_general(
@@ -245,11 +270,12 @@ fn format_args_expand_general(
tt: &tt::Subtree,
// FIXME: Make use of this so that mir interpretation works properly
_end_string: &str,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- let pound = quote! {@PUNCT '#'};
+ let pound = mk_pound(span);
let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
- return ExpandResult::ok(quote! {
+ return ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
});
}
@@ -258,25 +284,25 @@ fn asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them.
-
let mut literals = Vec::new();
for tt in tt.token_trees.chunks(2) {
match tt {
[tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
| [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
{
- let krate = DOLLAR_CRATE.clone();
- literals.push(quote!(#krate::format_args!(#lit);));
+ let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
+ literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
}
_ => break,
}
}
- let pound = quote! {@PUNCT '#'};
- let expanded = quote! {
+ let pound = mk_pound(span);
+ let expanded = quote! {span =>
builtin #pound asm (
{##literals}
)
@@ -288,20 +314,22 @@ fn global_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level)
- ExpandResult::ok(quote! {})
+ ExpandResult::ok(quote! {span =>})
}
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let expr = CfgExpr::parse(tt);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
- let expanded = if enabled { quote!(true) } else { quote!(false) };
+ let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
ExpandResult::ok(expanded)
}
@@ -309,13 +337,15 @@ fn panic_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
// Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
- quote!(#DOLLAR_CRATE::panic::panic_2021!)
+ quote!(span =>#dollar_crate::panic::panic_2021!)
} else {
- quote!(#DOLLAR_CRATE::panic::panic_2015!)
+ quote!(span =>#dollar_crate::panic::panic_2015!)
};
// Pass the original arguments
@@ -327,13 +357,15 @@ fn unreachable_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
// Expand to a macro call `$crate::panic::unreachable_{edition}`
+ let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
- quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
+ quote!(span =>#dollar_crate::panic::unreachable_2021!)
} else {
- quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
+ quote!(span =>#dollar_crate::panic::unreachable_2015!)
};
// Pass the original arguments
@@ -363,6 +395,7 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
@@ -372,13 +405,14 @@ fn compile_error_expand(
_ => ExpandError::other("`compile_error!` argument must be a string"),
};
- ExpandResult { value: quote! {}, err: Some(err) }
+ ExpandResult { value: quote! {span =>}, err: Some(err) }
}
fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
@@ -418,13 +452,14 @@ fn concat_expand(
}
}
}
- ExpandResult { value: quote!(#text), err }
+ ExpandResult { value: quote!(span =>#text), err }
}
fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new();
let mut err = None;
@@ -457,8 +492,25 @@ fn concat_bytes_expand(
}
}
}
- let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: quote!([#ident]), err }
+ let value = tt::Subtree {
+ delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+ token_trees: {
+ Itertools::intersperse_with(
+ bytes.into_iter().map(|it| {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
+ }),
+ || {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ char: ',',
+ spacing: tt::Spacing::Alone,
+ span,
+ }))
+ },
+ )
+ .collect()
+ },
+ };
+ ExpandResult { value, err }
}
fn concat_bytes_expand_subtree(
@@ -491,6 +543,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut ident = String::new();
@@ -505,8 +558,9 @@ fn concat_idents_expand(
}
}
}
- let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
- ExpandResult { value: quote!(#ident), err }
+ // FIXME merge spans
+ let ident = tt::Ident { text: ident.into(), span };
+ ExpandResult { value: quote!(span =>#ident), err }
}
fn relative_file(
@@ -541,45 +595,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
- _tt: &tt::Subtree,
+ tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- match db.include_expand(arg_id) {
- Ok((res, _)) => ExpandResult::ok(res.0.clone()),
- Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
+ let file_id = match include_input_to_file_id(db, arg_id, tt) {
+ Ok(it) => it,
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
+ };
+ match parse_to_token_tree(
+ SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+ SyntaxContextId::ROOT,
+ &db.file_text(file_id),
+ ) {
+ Some(it) => ExpandResult::ok(it),
+ None => ExpandResult::new(
+ tt::Subtree::empty(DelimSpan { open: span, close: span }),
+ ExpandError::other("failed to parse included file"),
+ ),
}
}
-pub(crate) fn include_arg_to_tt(
+pub fn include_input_to_file_id(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
-) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
- let loc = db.lookup_intern_macro_call(arg_id);
- let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
- panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
- };
- let path = parse_string(&arg.0)?;
- let file_id = relative_file(db, *arg_id, &path, false)?;
-
- let (subtree, map) =
- parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
- Ok((triomphe::Arc::new((subtree, map)), file_id))
+ arg: &tt::Subtree,
+) -> Result<FileId, ExpandError> {
+ relative_file(db, arg_id, &parse_string(arg)?, false)
}
fn include_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
- tt: &tt::Subtree,
+ _tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
- if let Err(e) = parse_string(tt) {
- return ExpandResult::new(tt::Subtree::empty(), e);
- }
-
// FIXME: actually read the file here if the user asked for macro expansion
let res = tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(),
- span: tt::TokenId::unspecified(),
+ span,
}))],
};
ExpandResult::ok(res)
@@ -589,10 +646,13 @@ fn include_str_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
// FIXME: we're not able to read excluded files (which is most of them because
@@ -602,14 +662,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
- return ExpandResult::ok(quote!(""));
+ return ExpandResult::ok(quote!(span =>""));
}
};
let text = db.file_text(file_id);
let text = &*text;
- ExpandResult::ok(quote!(#text))
+ ExpandResult::ok(quote!(span =>#text))
}
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@@ -621,10 +681,13 @@ fn env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
let mut err = None;
@@ -641,7 +704,7 @@ fn env_expand(
// `include!("foo.rs"), which might go to infinite loop
"UNRESOLVED_ENV_VAR".to_string()
});
- let expanded = quote! { #s };
+ let expanded = quote! {span => #s };
ExpandResult { value: expanded, err }
}
@@ -650,15 +713,18 @@ fn option_env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
+ span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
- Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
+ Err(e) => {
+ return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+ }
};
// FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) {
- None => quote! { ::core::option::Option::None::<&str> },
- Some(s) => quote! { ::core::option::Option::Some(#s) },
+ None => quote! {span => ::core::option::Option::None::<&str> },
+ Some(s) => quote! {span => ::core::option::Option::Some(#s) },
};
ExpandResult::ok(expanded)
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 5292a5fa1..935669d49 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,22 +1,31 @@
//! Defines database & queries for macro expansion.
-use base_db::{salsa, CrateId, Edition, SourceDatabase};
+use base_db::{
+ salsa::{self, debug::DebugQueryTable},
+ span::SyntaxContextId,
+ CrateId, Edition, FileId, SourceDatabase,
+};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
- ast::{self, HasAttrs, HasDocComments},
- AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
+ ast::{self, HasAttrs},
+ AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;
use crate::{
- ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
- builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
- BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
- ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
- MacroDefKind, MacroFile, ProcMacroExpander,
+ ast_id_map::AstIdMap,
+ attrs::{collect_attrs, RawAttrs},
+ builtin_attr_macro::pseudo_derive_attr_expansion,
+ builtin_fn_macro::EagerExpander,
+ fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
+ hygiene::{apply_mark, SyntaxContextData, Transparency},
+ span::{RealSpanMap, SpanMap, SpanMapRef},
+ tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
+ ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
+ MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
- pub mac: mbe::DeclarativeMacro,
- pub def_site_token_map: mbe::TokenMap,
+ pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
+ pub transparency: Transparency,
}
impl DeclarativeMacroExpander {
- pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ pub fn expand(
+ &self,
+ db: &dyn ExpandDatabase,
+ tt: tt::Subtree,
+ call_id: MacroCallId,
+ ) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
- None => self.mac.expand(tt).map_err(Into::into),
+ None => self
+ .mac
+ .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
+ .map_err(Into::into),
}
}
- pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
- self.mac.map_id_down(token_id)
- }
-
- pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
- self.mac.map_id_up(token_id)
+ pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ match self.mac.err() {
+ Some(e) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan::DUMMY),
+ ExpandError::other(format!("invalid macro definition: {e}")),
+ ),
+ None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
+ }
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
+ /// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
BuiltIn(BuiltinFnLikeExpander),
@@ -69,31 +89,6 @@ pub enum TokenExpander {
ProcMacro(ProcMacroExpander),
}
-// FIXME: Get rid of these methods
-impl TokenExpander {
- pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
- match self {
- TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
- TokenExpander::BuiltIn(..)
- | TokenExpander::BuiltInEager(..)
- | TokenExpander::BuiltInAttr(..)
- | TokenExpander::BuiltInDerive(..)
- | TokenExpander::ProcMacro(..) => id,
- }
- }
-
- pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
- match self {
- TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
- TokenExpander::BuiltIn(..)
- | TokenExpander::BuiltInEager(..)
- | TokenExpander::BuiltInAttr(..)
- | TokenExpander::BuiltInDerive(..)
- | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
- }
- }
-}
-
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase {
// This query is LRU cached
fn parse_macro_expansion(
&self,
- macro_file: MacroFile,
- ) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
+ macro_file: MacroFileId,
+ ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
+ #[salsa::transparent]
+ fn span_map(&self, file_id: HirFileId) -> SpanMap;
+
+ fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase {
/// to be incremental.
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
+ #[salsa::interned]
+ fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
- /// Lowers syntactic macro call to a token tree representation.
#[salsa::transparent]
- fn macro_arg(
- &self,
- id: MacroCallId,
- ) -> ValueResult<
- Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
- Arc<Box<[SyntaxError]>>,
- >;
- /// Extracts syntax node, corresponding to a macro call. That's a firewall
+ fn setup_syntax_context_root(&self) -> ();
+ #[salsa::transparent]
+ fn dump_syntax_contexts(&self) -> String;
+
+ /// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
- fn macro_arg_node(
+ fn macro_arg(
&self,
id: MacroCallId,
- ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+ ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro.
#[salsa::transparent]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase {
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander>;
-
- /// Expand macro call to a token tree.
- // This query is LRU cached
- fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
- #[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
- fn include_expand(
- &self,
- arg_id: MacroCallId,
- ) -> Result<
- (triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
- ExpandError,
- >;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
@@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>;
+}
- fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
+#[inline]
+pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
+ HirFileIdRepr::MacroFile(m) => {
+ SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
+ }
+ }
+}
+
+pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
+ Arc::new(RealSpanMap::from_file(db, file_id))
}
/// This expands the given macro call, but with different arguments. This is
@@ -181,21 +178,36 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
- let token_range = token_to_map.text_range();
+
+ let span_map = RealSpanMap::absolute(FileId::BOGUS);
+ let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
- let censor = censor_for_macro_input(&loc, speculative_args);
- let mut fixups = fixup::fixup_syntax(speculative_args);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
- speculative_args,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
+ let (mut tt, undo_info) = match loc.kind {
+ MacroCallKind::FnLike { .. } => {
+ (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
+ }
+ MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+ let censor = censor_for_macro_input(&loc, speculative_args);
+ let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
+ fixups.append.retain(|it, _| match it {
+ syntax::NodeOrToken::Node(it) => !censor.contains(it),
+ syntax::NodeOrToken::Token(_) => true,
+ });
+ fixups.remove.extend(censor);
+ (
+ mbe::syntax_node_to_token_tree_modified(
+ speculative_args,
+ span_map,
+ fixups.append,
+ fixups.remove,
+ ),
+ fixups.undo_info,
+ )
+ }
+ };
- let (attr_arg, token_id) = match loc.kind {
+ let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
@@ -204,65 +216,51 @@ pub fn expand_speculative(
// Attributes may have an input token tree, build the subtree and map for this as well
// then try finding a token id for our token if it is inside this input subtree.
let item = ast::Item::cast(speculative_args.clone())?;
- item.doc_comments_and_attrs()
+ collect_attrs(&item)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
}?;
match attr.token_tree() {
Some(token_tree) => {
- let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
- tree.delimiter = tt::Delimiter::unspecified();
-
- let shift = mbe::Shift::new(&tt);
- shift.shift_all(&mut tree);
-
- let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
- let attr_input_start =
- token_tree.left_delimiter_token()?.text_range().start();
- let range = token_range.checked_sub(attr_input_start)?;
- let token_id = shift.shift(map.token_by_range(range)?);
- Some(token_id)
- } else {
- None
- };
- (Some(tree), token_id)
- }
- _ => (None, None),
- }
- }
- _ => (None, None),
- };
- let token_id = match token_id {
- Some(token_id) => token_id,
- // token wasn't inside an attribute input so it has to be in the general macro input
- None => {
- let range = token_range.checked_sub(speculative_args.text_range().start())?;
- let token_id = spec_args_tmap.token_by_range(range)?;
- match loc.def.kind {
- MacroDefKind::Declarative(it) => {
- db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
+ let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
+ tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+
+ Some(tree)
}
- _ => token_id,
+ _ => None,
}
}
+ _ => None,
};
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
- tt.delimiter = tt::Delimiter::unspecified();
- expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
+ tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+ let call_site = loc.span(db);
+ expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &tt,
+ attr_arg.as_ref(),
+ call_site,
+ call_site,
+ call_site,
+ )
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
- pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
+ pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
}
MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
- expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
+ expander.expand(db, actual_macro_call, &adt, span_map)
+ }
+ MacroDefKind::Declarative(it) => {
+ db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
}
- MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@@ -270,13 +268,14 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
};
- let expand_to = macro_expand_to(db, actual_macro_call);
- fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
+ let expand_to = loc.expand_to();
+
+ fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
- .ranges_by_token(token_id, token_to_map.kind())
+ .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {
// prefer tokens of the same kind and text
@@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
- HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
+ HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@@ -312,17 +311,16 @@ fn parse_or_expand_with_err(
}
}
+// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
+// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
- macro_file: MacroFile,
-) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
+ macro_file: MacroFileId,
+) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = profile::span("parse_macro_expansion");
- let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
-
- let expand_to = macro_expand_to(db, macro_file.macro_call_id);
-
- tracing::debug!("expanded = {}", tt.as_debug_string());
- tracing::debug!("kind = {:?}", expand_to);
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let expand_to = loc.expand_to();
+ let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
@@ -333,51 +331,138 @@ fn parse_macro_expansion_error(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
- db.parse_macro_expansion(MacroFile { macro_call_id })
+ db.parse_macro_expansion(MacroFileId { macro_call_id })
.map(|it| it.0.errors().to_vec().into_boxed_slice())
}
+fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
+ }
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let (parse, map) = db.parse_macro_expansion(macro_file).value;
+ (parse, SpanMap::ExpansionSpanMap(map))
+ }
+ }
+}
+
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
-) -> ValueResult<
- Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
- Arc<Box<[SyntaxError]>>,
-> {
- let loc = db.lookup_intern_macro_call(id);
-
- if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
- return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
- }
-
- let ValueResult { value, err } = db.macro_arg_node(id);
- let Some(arg) = value else {
- return ValueResult { value: None, err };
+ // FIXME: consider the following by putting fixup info into eager call info args
+ // ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
+) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
+ let mismatched_delimiters = |arg: &SyntaxNode| {
+ let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
+ let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
+ let well_formed_tt =
+ matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
+ if !well_formed_tt {
+ // Don't expand malformed (unbalanced) macro invocations. This is
+ // less than ideal, but trying to expand unbalanced macro calls
+ // sometimes produces pathological, deeply nested code which breaks
+ // all kinds of things.
+ //
+ // Some day, we'll have explicit recursion counters for all
+ // recursive things, at which point this code might be removed.
+ cov_mark::hit!(issue9358_bad_macro_stack_overflow);
+ Some(Arc::new(Box::new([SyntaxError::new(
+ "unbalanced token tree".to_owned(),
+ arg.text_range(),
+ )]) as Box<[_]>))
+ } else {
+ None
+ }
};
+ let loc = db.lookup_intern_macro_call(id);
+ if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
+ .then(|| loc.eager.as_deref())
+ .flatten()
+ {
+ ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
+ } else {
+ let (parse, map) = parse_with_map(db, loc.kind.file_id());
+ let root = parse.syntax_node();
+
+ let syntax = match loc.kind {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = &ast_id.to_ptr(db).to_node(&root);
+ let offset = node.syntax().text_range().start();
+ match node.token_tree() {
+ Some(tt) => {
+ let tt = tt.syntax();
+ if let Some(e) = mismatched_delimiters(tt) {
+ return ValueResult::only_err(e);
+ }
+ tt.clone()
+ }
+ None => {
+ return ValueResult::only_err(Arc::new(Box::new([
+ SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
+ ])));
+ }
+ }
+ }
+ MacroCallKind::Derive { ast_id, .. } => {
+ ast_id.to_ptr(db).to_node(&root).syntax().clone()
+ }
+ MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
+ };
+ let (mut tt, undo_info) = match loc.kind {
+ MacroCallKind::FnLike { .. } => {
+ (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
+ }
+ MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+ let censor = censor_for_macro_input(&loc, &syntax);
+ let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
+ fixups.append.retain(|it, _| match it {
+ syntax::NodeOrToken::Node(it) => !censor.contains(it),
+ syntax::NodeOrToken::Token(_) => true,
+ });
+ fixups.remove.extend(censor);
+ {
+ let mut tt = mbe::syntax_node_to_token_tree_modified(
+ &syntax,
+ map.as_ref(),
+ fixups.append.clone(),
+ fixups.remove.clone(),
+ );
+ reverse_fixups(&mut tt, &fixups.undo_info);
+ }
+ (
+ mbe::syntax_node_to_token_tree_modified(
+ &syntax,
+ map,
+ fixups.append,
+ fixups.remove,
+ ),
+ fixups.undo_info,
+ )
+ }
+ };
- let node = SyntaxNode::new_root(arg);
- let censor = censor_for_macro_input(&loc, &node);
- let mut fixups = fixup::fixup_syntax(&node);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
- &node,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
+ if loc.def.is_proc_macro() {
+ // proc macros expect their inputs without parentheses, MBEs expect it with them included
+ tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+ }
- if loc.def.is_proc_macro() {
- // proc macros expect their inputs without parentheses, MBEs expect it with them included
- tt.delimiter = tt::Delimiter::unspecified();
- }
- let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
- match err {
- Some(err) => ValueResult::new(val, err),
- None => ValueResult::ok(val),
+ if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
+ match parse.errors() {
+ [] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
+ errors => ValueResult::new(
+ Some((Arc::new(tt), undo_info)),
+ // Box::<[_]>::from(res.errors()), not stable yet
+ Arc::new(errors.to_vec().into_boxed_slice()),
+ ),
+ }
+ } else {
+ ValueResult::ok(Some((Arc::new(tt), undo_info)))
+ }
}
}
+// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
@@ -403,10 +488,9 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
MacroCallKind::Attr { invoc_attr_index, .. } => {
cov_mark::hit!(attribute_macro_attr_censoring);
- ast::Item::cast(node.clone())?
- .doc_comments_and_attrs()
+ collect_attrs(&ast::Item::cast(node.clone())?)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
.map(|attr| attr.syntax().clone())
.into_iter()
.collect()
@@ -417,103 +501,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
-fn macro_arg_node(
- db: &dyn ExpandDatabase,
- id: MacroCallId,
-) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
- let err = || -> Arc<Box<[_]>> {
- Arc::new(Box::new([SyntaxError::new_at_offset(
- "invalid macro call".to_owned(),
- syntax::TextSize::from(0),
- )]))
- };
- let loc = db.lookup_intern_macro_call(id);
- let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
- let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
- Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
- } else {
- loc.kind
- .arg(db)
- .and_then(|arg| ast::TokenTree::cast(arg.value))
- .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
- };
- match res {
- Some(res) if res.errors().is_empty() => res.syntax_node(),
- Some(res) => {
- return ValueResult::new(
- Some(res.syntax_node().green().into()),
- // Box::<[_]>::from(res.errors()), not stable yet
- Arc::new(res.errors().to_vec().into_boxed_slice()),
- );
- }
- None => return ValueResult::only_err(err()),
- }
- } else {
- match loc.kind.arg(db) {
- Some(res) => res.value,
- None => return ValueResult::only_err(err()),
- }
- };
- if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
- let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
- let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
- let well_formed_tt =
- matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
- if !well_formed_tt {
- // Don't expand malformed (unbalanced) macro invocations. This is
- // less than ideal, but trying to expand unbalanced macro calls
- // sometimes produces pathological, deeply nested code which breaks
- // all kinds of things.
- //
- // Some day, we'll have explicit recursion counters for all
- // recursive things, at which point this code might be removed.
- cov_mark::hit!(issue9358_bad_macro_stack_overflow);
- return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
- "unbalanced token tree".to_owned(),
- arg.text_range(),
- )])));
- }
- }
- ValueResult::ok(Some(arg.green().into()))
-}
-
fn decl_macro_expander(
db: &dyn ExpandDatabase,
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
- let (mac, def_site_token_map) = match id.to_node(db) {
- ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
- Some(arg) => {
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
- (mac, def_site_token_map)
- }
- None => (
- mbe::DeclarativeMacro::from_err(
+ let (root, map) = parse_with_map(db, id.file_id);
+ let root = root.syntax_node();
+
+ let transparency = |node| {
+ // ... would be nice to have the item tree here
+ let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
+ match &*attrs
+ .iter()
+ .find(|it| {
+ it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
+ })?
+ .token_tree_value()?
+ .token_trees
+ {
+ [tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
+ "transparent" => Some(Transparency::Transparent),
+ "semitransparent" => Some(Transparency::SemiTransparent),
+ "opaque" => Some(Transparency::Opaque),
+ _ => None,
+ },
+ _ => None,
+ }
+ };
+
+ let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
+ ast::Macro::MacroRules(macro_rules) => (
+ match macro_rules.token_tree() {
+ Some(arg) => {
+ let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+ let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+ mac
+ }
+ None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
- Default::default(),
- ),
- },
- ast::Macro::MacroDef(macro_def) => match macro_def.body() {
- Some(arg) => {
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
- (mac, def_site_token_map)
- }
- None => (
- mbe::DeclarativeMacro::from_err(
+ },
+ transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
+ ),
+ ast::Macro::MacroDef(macro_def) => (
+ match macro_def.body() {
+ Some(arg) => {
+ let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+ let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+ mac
+ }
+ None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
- Default::default(),
- ),
- },
+ },
+ transparency(&macro_def).unwrap_or(Transparency::Opaque),
+ ),
};
- Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+ Arc::new(DeclarativeMacroExpander { mac, transparency })
}
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@@ -529,39 +577,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
}
}
-fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
+fn macro_expand(
+ db: &dyn ExpandDatabase,
+ macro_call_id: MacroCallId,
+ loc: MacroCallLoc,
+) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
- let loc = db.lookup_intern_macro_call(id);
let ExpandResult { value: tt, mut err } = match loc.def.kind {
- MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
+ MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => {
- let arg = db.macro_arg_node(id).value.unwrap();
-
- let node = SyntaxNode::new_root(arg);
- let censor = censor_for_macro_input(&loc, &node);
- let mut fixups = fixup::fixup_syntax(&node);
- fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
- let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
- &node,
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
- fixups.append,
- );
-
- // this cast is a bit sus, can we avoid losing the typedness here?
- let adt = ast::Adt::cast(node).unwrap();
- let mut res = expander.expand(db, id, &adt, &tmap);
- fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
- res
+ let (root, map) = parse_with_map(db, loc.kind.file_id());
+ let root = root.syntax_node();
+ let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
+ let node = ast_id.to_ptr(db).to_node(&root);
+
+ // FIXME: Use censoring
+ let _censor = censor_for_macro_input(&loc, node.syntax());
+ expander.expand(db, macro_call_id, &node, map.as_ref())
}
_ => {
- let ValueResult { value, err } = db.macro_arg(id);
- let Some(macro_arg) = value else {
+ let ValueResult { value, err } = db.macro_arg(macro_call_id);
+ let Some((macro_arg, undo_info)) = value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@@ -570,12 +610,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
};
};
- let (arg, arg_tm, undo_info) = &*macro_arg;
- let mut res = match loc.def.kind {
+ let arg = &*macro_arg;
+ match loc.def.kind {
MacroDefKind::Declarative(id) => {
- db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
+ db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
+ }
+ MacroDefKind::BuiltIn(it, _) => {
+ it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
- MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macros input though which goes through
@@ -583,11 +625,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
// will end up going through here again, whereas we want to just want to inspect the raw input.
// As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
- let mut arg = arg.clone();
- fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
-
return ExpandResult {
- value: Arc::new(arg),
+ value: macro_arg.clone(),
err: err.map(|err| {
let mut buf = String::new();
for err in &**err {
@@ -600,12 +639,16 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}),
};
}
- MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
- MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
+ MacroDefKind::BuiltInEager(it, _) => {
+ it.expand(db, macro_call_id, &arg).map_err(Into::into)
+ }
+ MacroDefKind::BuiltInAttr(it, _) => {
+ let mut res = it.expand(db, macro_call_id, &arg);
+ fixup::reverse_fixups(&mut res.value, &undo_info);
+ res
+ }
_ => unreachable!(),
- };
- fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
- res
+ }
}
};
@@ -614,9 +657,12 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
err = error.clone().or(err);
}
- // Set a hard limit for the expanded tt
- if let Err(value) = check_tt_count(&tt) {
- return value;
+ // Skip checking token tree limit for include! macro call
+ if !loc.def.is_include() {
+ // Set a hard limit for the expanded tt
+ if let Err(value) = check_tt_count(&tt) {
+ return value;
+ }
}
ExpandResult { value: Arc::new(tt), err }
@@ -624,10 +670,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
- let Some(macro_arg) = db.macro_arg(id).value else {
+ let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@@ -636,47 +682,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
};
};
- let (arg_tt, arg_tm, undo_info) = &*macro_arg;
-
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => {
- let mut attr_args = attr_args.0.clone();
- mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
- Some(attr_args)
- }
+ MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
_ => None,
};
- let ExpandResult { value: mut tt, err } =
- expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
+ let call_site = loc.span(db);
+ let ExpandResult { value: mut tt, err } = expander.expand(
+ db,
+ loc.def.krate,
+ loc.krate,
+ &macro_arg,
+ attr_arg,
+ // FIXME
+ call_site,
+ call_site,
+ // FIXME
+ call_site,
+ );
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
}
- fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
+ fixup::reverse_fixups(&mut tt, &undo_info);
ExpandResult { value: Arc::new(tt), err }
}
-fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
- Arc::new(HygieneFrame::new(db, file_id))
-}
-
-fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
- db.lookup_intern_macro_call(id).expand_to()
-}
-
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
-) -> (Parse<SyntaxNode>, mbe::TokenMap) {
+) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@@ -692,7 +735,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult {
value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: vec![],
}),
err: Some(ExpandError::other(format!(
@@ -705,3 +748,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
Ok(())
}
}
+
+fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
+ db.intern_syntax_context(SyntaxContextData::root());
+}
+
+fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
+ let mut s = String::from("Expansions:");
+ let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
+ entries.sort_by_key(|e| e.key);
+ for e in entries {
+ let id = e.key;
+ let expn_data = e.value.as_ref().unwrap();
+ s.push_str(&format!(
+ "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
+ id,
+ expn_data.kind.file_id(),
+ expn_data.call_site,
+ SyntaxContextId::ROOT, // FIXME expn_data.def_site,
+ expn_data.kind.descr(),
+ ));
+ }
+
+ s.push_str("\n\nSyntaxContexts:\n");
+ let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
+ entries.sort_by_key(|e| e.key);
+ for e in entries {
+ struct SyntaxContextDebug<'a>(
+ &'a dyn ExpandDatabase,
+ SyntaxContextId,
+ &'a SyntaxContextData,
+ );
+
+ impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.2.fancy_debug(self.1, self.0, f)
+ }
+ }
+ stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
+ }
+ s
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index 4110f2847..8d55240ae 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -18,18 +18,17 @@
//!
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
-use base_db::CrateId;
-use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
+use base_db::{span::SyntaxContextId, CrateId};
+use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;
use crate::{
ast::{self, AstNode},
db::ExpandDatabase,
- hygiene::Hygiene,
mod_path::ModPath,
- EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
- MacroCallLoc, MacroDefId, MacroDefKind,
+ span::SpanMapRef,
+ EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId,
+ MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
};
pub fn expand_eager_macro_input(
@@ -37,6 +36,7 @@ pub fn expand_eager_macro_input(
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
+ call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id);
@@ -53,75 +53,44 @@ pub fn expand_eager_macro_input(
krate,
eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
+ call_site,
});
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
- // we need this map here as the expansion of the eager input fake file loses whitespace ...
- let mut ws_mapping = FxHashMap::default();
- if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
- ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
- Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
- }));
- }
+
+ let mut arg_map = ExpansionSpanMap::empty();
let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur(
db,
- &Hygiene::new(db, macro_call.file_id),
+ &arg_exp_map,
+ &mut arg_map,
+ TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
krate,
+ call_site,
resolver,
)
};
let err = parse_err.or(err);
+ if cfg!(debug_assertions) {
+ arg_map.finish();
+ }
- let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
+ let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
return ExpandResult { value: None, err };
};
- let (mut subtree, expanded_eager_input_token_map) =
- mbe::syntax_node_to_token_tree(&expanded_eager_input);
+ let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
- let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
- let mut ids_used = FxHashSet::default();
- let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
- // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
- // so we need to remap them to the original input of the eager macro.
- subtree.visit_ids(&mut |id| {
- // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
-
- if let Some(range) = expanded_eager_input_token_map
- .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
- {
- // remap from expanded eager input to eager input expansion
- if let Some(og_range) = mapping.get(&range) {
- // remap from eager input expansion to original eager input
- if let Some(&og_range) = ws_mapping.get(og_range) {
- if let Some(og_token) = og_tmap.token_by_range(og_range) {
- ids_used.insert(og_token);
- return og_token;
- }
- }
- }
- }
- tt::TokenId::UNSPECIFIED
- });
- og_tmap.filter(|id| ids_used.contains(&id));
- og_tmap
- } else {
- Default::default()
- };
- subtree.delimiter = crate::tt::Delimiter::unspecified();
+ subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
let loc = MacroCallLoc {
def,
krate,
- eager: Some(Box::new(EagerCallInfo {
- arg: Arc::new((subtree, og_tmap)),
- arg_id,
- error: err.clone(),
- })),
+ eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
+ call_site,
};
ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@@ -132,12 +101,13 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
-) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
+ call_site: SyntaxContextId,
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
let ast_id = macro_call.with_value(ast_id);
- let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
+ let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
@@ -146,57 +116,59 @@ fn lazy_expand(
fn eager_macro_recur(
db: &dyn ExpandDatabase,
- hygiene: &Hygiene,
+ span_map: &ExpansionSpanMap,
+ expanded_map: &mut ExpansionSpanMap,
+ mut offset: TextSize,
curr: InFile<SyntaxNode>,
krate: CrateId,
+ call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
+) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update();
- let mut mapping = FxHashMap::default();
let mut replacements = Vec::new();
// FIXME: We only report a single error inside of eager expansions
let mut error = None;
- let mut offset = 0i32;
- let apply_offset = |it: TextSize, offset: i32| {
- TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
- };
let mut children = original.preorder_with_tokens();
// Collect replacement
while let Some(child) = children.next() {
- let WalkEvent::Enter(child) = child else { continue };
let call = match child {
- syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
+ WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) {
Some(it) => {
children.skip_subtree();
it
}
- None => continue,
+ _ => continue,
},
- syntax::NodeOrToken::Token(t) => {
- mapping.insert(
- TextRange::new(
- apply_offset(t.text_range().start(), offset),
- apply_offset(t.text_range().end(), offset),
- ),
- t.text_range(),
- );
+ WalkEvent::Enter(_) => continue,
+ WalkEvent::Leave(child) => {
+ if let SyntaxElement::Token(t) = child {
+ let start = t.text_range().start();
+ offset += t.text_range().len();
+ expanded_map.push(offset, span_map.span_at(start));
+ }
continue;
}
};
- let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+
+ let def = match call
+ .path()
+ .and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
+ {
Some(path) => match macro_resolver(path.clone()) {
Some(def) => def,
None => {
error =
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
+ offset += call.syntax().text_range().len();
continue;
}
},
None => {
error = Some(ExpandError::other("malformed macro invocation"));
+ offset += call.syntax().text_range().len();
continue;
}
};
@@ -207,29 +179,22 @@ fn eager_macro_recur(
krate,
curr.with_value(call.clone()),
def,
+ call_site,
macro_resolver,
);
match value {
Some(call_id) => {
- let ExpandResult { value, err: err2 } =
+ let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
- if let Some(tt) = call.token_tree() {
- let call_tt_start = tt.syntax().text_range().start();
- let call_start =
- apply_offset(call.syntax().text_range().start(), offset);
- if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
- mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
- value
- .1
- .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
- .map(|r| (r + call_start, range + call_tt_start))
- }));
- }
- }
+ map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
+ let syntax_node = parse.syntax_node();
ExpandResult {
- value: Some(value.0.syntax_node().clone_for_update()),
+ value: Some((
+ syntax_node.clone_for_update(),
+ offset + syntax_node.text_range().len(),
+ )),
err: err.or(err2),
}
}
@@ -242,45 +207,23 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } =
- lazy_expand(db, &def, curr.with_value(call.clone()), krate);
- let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
- Some(db.decl_macro_expander(def.krate, ast_id))
- } else {
- None
- };
+ lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
// replace macro inside
- let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
- &hygiene,
+ &tm,
+ expanded_map,
+ offset,
// FIXME: We discard parse errors here
parse.as_ref().map(|it| it.syntax_node()),
krate,
+ call_site,
macro_resolver,
);
let err = err.or(error);
- if let Some(tt) = call.token_tree() {
- let call_tt_start = tt.syntax().text_range().start();
- let call_start = apply_offset(call.syntax().text_range().start(), offset);
- if let Some((_tt, arg_map, _)) = parse
- .file_id
- .macro_file()
- .and_then(|id| db.macro_arg(id.macro_call_id).value)
- .as_deref()
- {
- mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
- tm.first_range_by_token(
- decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
- syntax::SyntaxKind::TOMBSTONE,
- )
- .map(|r| (r + call_start, range + call_tt_start))
- }));
- }
- }
- // FIXME: Do we need to re-use _m here?
- ExpandResult { value: value.map(|(n, _m)| n), err }
+ ExpandResult { value, err }
}
};
if err.is_some() {
@@ -288,16 +231,18 @@ fn eager_macro_recur(
}
// check if the whole original syntax is replaced
if call.syntax() == &original {
- return ExpandResult { value: value.zip(Some(mapping)), err: error };
+ return ExpandResult { value, err: error };
}
- if let Some(insert) = value {
- offset += u32::from(insert.text_range().len()) as i32
- - u32::from(call.syntax().text_range().len()) as i32;
- replacements.push((call, insert));
+ match value {
+ Some((insert, new_offset)) => {
+ replacements.push((call, insert));
+ offset = new_offset;
+ }
+ None => offset += call.syntax().text_range().len(),
}
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
- ExpandResult { value: Some((original, mapping)), err: error }
+ ExpandResult { value: Some((original, offset)), err: error }
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
new file mode 100644
index 000000000..89f0685d5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
@@ -0,0 +1,375 @@
+//! Things to wrap other things in file ids.
+use std::iter;
+
+use base_db::{
+ span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
+ FileId, FileRange,
+};
+use either::Either;
+use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};
+
+use crate::{db, ExpansionInfo, MacroFileIdExt};
+
+/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
+///
+/// Typical usages are:
+///
+/// * `InFile<SyntaxNode>` -- syntax node in a file
+/// * `InFile<ast::FnDef>` -- ast node in a file
+/// * `InFile<TextSize>` -- offset in a file
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InFileWrapper<FileKind, T> {
+ pub file_id: FileKind,
+ pub value: T,
+}
+pub type InFile<T> = InFileWrapper<HirFileId, T>;
+pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
+pub type InRealFile<T> = InFileWrapper<FileId, T>;
+
+impl<FileKind, T> InFileWrapper<FileKind, T> {
+ pub fn new(file_id: FileKind, value: T) -> Self {
+ Self { file_id, value }
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> {
+ InFileWrapper::new(self.file_id, f(self.value))
+ }
+}
+
+impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
+ pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> {
+ InFileWrapper::new(self.file_id, value)
+ }
+
+ pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
+ self.with_value(&self.value)
+ }
+}
+
+impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
+ pub fn cloned(&self) -> InFileWrapper<FileKind, T> {
+ self.with_value(self.value.clone())
+ }
+}
+
+impl<T> From<InMacroFile<T>> for InFile<T> {
+ fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self {
+ InFile { file_id: file_id.into(), value }
+ }
+}
+
+impl<T> From<InRealFile<T>> for InFile<T> {
+ fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self {
+ InFile { file_id: file_id.into(), value }
+ }
+}
+
+// region:transpose impls
+
+impl<FileKind, T> InFileWrapper<FileKind, Option<T>> {
+ pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> {
+ Some(InFileWrapper::new(self.file_id, self.value?))
+ }
+}
+
+impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> {
+ pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> {
+ match self.value {
+ Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)),
+ Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)),
+ }
+ }
+}
+
+// endregion:transpose impls
+
+trait FileIdToSyntax: Copy {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
+}
+
+impl FileIdToSyntax for FileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse(self).syntax_node()
+ }
+}
+impl FileIdToSyntax for MacroFileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse_macro_expansion(self).value.0.syntax_node()
+ }
+}
+impl FileIdToSyntax for HirFileId {
+ fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ db.parse_or_expand(self)
+ }
+}
+
+#[allow(private_bounds)]
+impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
+ pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ FileIdToSyntax::file_syntax(self.file_id, db)
+ }
+}
+
+impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
+ pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
+ self.with_value(self.value.syntax())
+ }
+}
+
+// region:specific impls
+
+impl InFile<&SyntaxNode> {
+ /// Skips the attributed item that caused the macro invocation we are climbing up
+ pub fn ancestors_with_macros_skip_attr_item(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => {
+ let macro_file_id = node.file_id.macro_file()?;
+ let parent_node = macro_file_id.call_node(db);
+ if macro_file_id.is_attr_macro(db) {
+ // macro call was an attributed item, skip it
+ // FIXME: does this fail if this is a direct expansion of another macro?
+ parent_node.map(|node| node.parent()).transpose()
+ } else {
+ Some(parent_node)
+ }
+ }
+ };
+ iter::successors(succ(&self.cloned()), succ)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ ///
+ /// For attributes and derives, this will point back to the attribute only.
+ /// For the entire item use [`InFile::original_file_range_full`].
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some((res, ctxt)) =
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ {
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return res;
+ }
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some((res, ctxt)) =
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ {
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return res;
+ }
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range_with_body(db)
+ }
+ }
+ }
+
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
+ }
+ }
+ }
+
+ pub fn original_syntax_node(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<InRealFile<SyntaxNode>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
+ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ let file_id = match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ return Some(InRealFile { file_id, value: self.value.clone() })
+ }
+ HirFileIdRepr::MacroFile(m) => m,
+ };
+ if !file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ let (FileRange { file_id, range }, ctx) =
+ ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if !ctx.is_root() {
+ return None;
+ }
+
+ let anc = db.parse(file_id).syntax_node().covering_element(range);
+ let kind = self.value.kind();
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
+ let value = anc.ancestors().find(|it| it.kind() == kind)?;
+ Some(InRealFile::new(file_id, value))
+ }
+}
+
+impl InMacroFile<SyntaxToken> {
+ pub fn upmap_once(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
+ self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range())
+ }
+}
+
+impl InFile<SyntaxToken> {
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ let (range, ctxt) = ExpansionInfo::new(db, mac_file)
+ .span_for_offset(db, self.value.text_range().start());
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ return range;
+ }
+
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some(FileRange { file_id, range: self.value.text_range() })
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ let (range, ctxt) = ExpansionInfo::new(db, mac_file)
+ .span_for_offset(db, self.value.text_range().start());
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if ctxt.is_root() {
+ Some(range)
+ } else {
+ None
+ }
+ }
+ }
+ }
+}
+
+impl InMacroFile<TextSize> {
+ pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
+ ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
+ }
+}
+
+impl InFile<TextRange> {
+ pub fn original_node_file_range(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> (FileRange, SyntaxContextId) {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ (FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ Some(it) => it,
+ None => {
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ (loc.kind.original_call_range(db), SyntaxContextId::ROOT)
+ }
+ }
+ }
+ }
+ }
+
+ pub fn original_node_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ match ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value) {
+ Some((it, SyntaxContextId::ROOT)) => it,
+ _ => {
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+ }
+ }
+
+ pub fn original_node_file_range_opt(
+ self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
+ }
+ HirFileIdRepr::MacroFile(mac_file) => {
+ ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value)
+ }
+ }
+ }
+}
+
+impl<N: AstNode> InFile<N> {
+ pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
+ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ let file_id = match self.file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => {
+ return Some(InRealFile { file_id, value: self.value })
+ }
+ HirFileIdRepr::MacroFile(m) => m,
+ };
+ if !file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
+ .map_node_range_up(db, self.value.syntax().text_range())?;
+
+ // FIXME: Figure out an API that makes proper use of ctx, this only exists to
+ // keep pre-token map rewrite behaviour.
+ if !ctx.is_root() {
+ return None;
+ }
+
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
+ let anc = db.parse(file_id).syntax_node().covering_element(range);
+ let value = anc.ancestors().find_map(N::cast)?;
+ Some(InRealFile::new(file_id, value))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index e6e8d8c02..346cd39a7 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -1,111 +1,126 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
-use std::mem;
-use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
-use rustc_hash::FxHashMap;
+use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData},
+ FileId,
+};
+use la_arena::RawIdx;
+use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
+use stdx::never;
use syntax::{
ast::{self, AstNode, HasLoopBody},
- match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+ match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+use triomphe::Arc;
+use tt::{Spacing, Span};
+
+use crate::{
+ span::SpanMapRef,
+ tt::{Ident, Leaf, Punct, Subtree},
};
-use tt::token_id::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
- pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
+ pub(crate) remove: FxHashSet<SyntaxNode>,
pub(crate) undo_info: SyntaxFixupUndoInfo,
- pub(crate) token_map: TokenMap,
- pub(crate) next_id: u32,
}
/// This is the information needed to reverse the fixups.
-#[derive(Debug, Default, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
- original: Box<[Subtree]>,
+ // FIXME: ThinArc<[Subtree]>
+ original: Option<Arc<Box<[Subtree]>>>,
+}
+
+impl SyntaxFixupUndoInfo {
+ pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}
-const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+// censoring -> just don't convert the node
+// replacement -> censor + append
+// append -> insert a fake node; here we need to assemble a dummy span that lets us
+// figure out how to remove the node again later
+const FIXUP_DUMMY_FILE: FileId = FileId::from_raw(FileId::MAX_FILE_ID);
+const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::from_u32(!0));
+const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
+const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);
-pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
- let mut replace = FxHashMap::<SyntaxElement, _>::default();
+ let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
- let mut token_map = TokenMap::default();
- let mut next_id = 0;
+ let dummy_range = FIXUP_DUMMY_RANGE;
+    // We use a file id of `FileId(!0)` to signal a fake node; its text range's start offset is
+    // the index into the replacement vec, but only if the range's end points to !0.
+ let dummy_anchor = SpanAnchor { file_id: FIXUP_DUMMY_FILE, ast_id: FIXUP_DUMMY_AST_ID };
+ let fake_span = |range| SpanData {
+ range: dummy_range,
+ anchor: dummy_anchor,
+ ctx: span_map.span_for_range(range).ctx,
+ };
while let Some(event) = preorder.next() {
- let node = match event {
- syntax::WalkEvent::Enter(node) => node,
- syntax::WalkEvent::Leave(_) => continue,
- };
+ let syntax::WalkEvent::Enter(node) = event else { continue };
+ let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) {
+ remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid
- let (original_tree, new_tmap, new_next_id) =
- mbe::syntax_node_to_token_tree_with_modifications(
- &node,
- mem::take(&mut token_map),
- next_id,
- Default::default(),
- Default::default(),
- );
- token_map = new_tmap;
- next_id = new_next_id;
+ let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
let idx = original.len() as u32;
original.push(original_tree);
- let replacement = SyntheticToken {
- kind: SyntaxKind::IDENT,
+ let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: node.text_range(),
- id: SyntheticTokenId(idx),
- };
- replace.insert(node.clone().into(), vec![replacement]);
+ span: SpanData {
+ range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
+ anchor: dummy_anchor,
+ ctx: span_map.span_for_range(node_range).ctx,
+ },
+ });
+ append.insert(node.clone().into(), vec![replacement]);
preorder.skip_subtree();
continue;
}
+
// In some other situations, we can fix things by just appending some tokens.
- let end_range = TextRange::empty(node.text_range().end());
match_ast! {
match node {
ast::FieldExpr(it) => {
if it.name_ref().is_none() {
// incomplete field access: some_expr.|
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range),
+ }),
]);
}
},
ast::ExprStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::SEMICOLON,
- text: ";".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ Leaf::Punct(Punct {
+ char: ';',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range),
+ }),
]);
}
},
ast::LetStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::SEMICOLON,
- text: ";".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ Leaf::Punct(Punct {
+ char: ';',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -117,28 +132,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(if_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.then_branch().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                            // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -150,46 +162,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(while_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                                // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
ast::LoopExpr(it) => {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                            // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -201,29 +209,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue
};
append.insert(match_token.into(), vec![
- SyntheticToken {
- kind: SyntaxKind::IDENT,
+ Leaf::Ident(Ident {
text: "__ra_fixup".into(),
- range: end_range,
- id: EMPTY_ID
- },
+ span: fake_span(node_range)
+ }),
]);
}
if it.match_arm_list().is_none() {
// No match arms
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                            // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -234,10 +239,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
};
let [pat, in_token, iter] = [
- (SyntaxKind::UNDERSCORE, "_"),
- (SyntaxKind::IN_KW, "in"),
- (SyntaxKind::IDENT, "__ra_fixup")
- ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
+ "_",
+ "in",
+ "__ra_fixup"
+ ].map(|text|
+ Leaf::Ident(Ident {
+ text: text.into(),
+ span: fake_span(node_range)
+ }),
+ );
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
append.insert(for_token.into(), vec![pat, in_token, iter]);
@@ -248,18 +258,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
- SyntheticToken {
- kind: SyntaxKind::L_CURLY,
- text: "{".into(),
- range: end_range,
- id: EMPTY_ID,
- },
- SyntheticToken {
- kind: SyntaxKind::R_CURLY,
- text: "}".into(),
- range: end_range,
- id: EMPTY_ID,
- },
+                            // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct {
+ char: '{',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
+ Leaf::Punct(Punct {
+ char: '}',
+ spacing: Spacing::Alone,
+ span: fake_span(node_range)
+ }),
]);
}
},
@@ -267,12 +276,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
}
}
}
+ let needs_fixups = !append.is_empty() || !original.is_empty();
SyntaxFixups {
append,
- replace,
- token_map,
- next_id,
- undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
+ remove,
+ undo_info: SyntaxFixupUndoInfo {
+ original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
+ },
}
}
@@ -288,36 +298,57 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
-pub(crate) fn reverse_fixups(
- tt: &mut Subtree,
- token_map: &TokenMap,
- undo_info: &SyntaxFixupUndoInfo,
-) {
+pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
+ let Some(undo_info) = undo_info.original.as_deref() else { return };
+ let undo_info = &**undo_info;
+ if never!(
+ tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+ || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ ) {
+ tt.delimiter.close = SpanData::DUMMY;
+ tt.delimiter.open = SpanData::DUMMY;
+ }
+ reverse_fixups_(tt, undo_info);
+}
+
+fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
let tts = std::mem::take(&mut tt.token_trees);
tt.token_trees = tts
.into_iter()
+ // delete all fake nodes
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
- token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
- }
- tt::TokenTree::Subtree(st) => {
- token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
+ let span = leaf.span();
+ let is_real_leaf = span.anchor.file_id != FIXUP_DUMMY_FILE;
+ let is_replaced_node = span.range.end() == FIXUP_DUMMY_RANGE_END;
+ is_real_leaf || is_replaced_node
}
+ tt::TokenTree::Subtree(_) => true,
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
- reverse_fixups(&mut tt, token_map, undo_info);
+ if tt.delimiter.close.anchor.file_id == FIXUP_DUMMY_FILE
+ || tt.delimiter.open.anchor.file_id == FIXUP_DUMMY_FILE
+ {
+ // Even though fixup never creates subtrees with fixup spans, the old proc-macro server
+ // might copy them if the proc-macro asks for it, so we need to filter those out
+ // here as well.
+ return SmallVec::new_const();
+ }
+ reverse_fixups_(&mut tt, undo_info);
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
- if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
- let original = undo_info.original[id.0 as usize].clone();
+ if leaf.span().anchor.file_id == FIXUP_DUMMY_FILE {
+ // we have a fake node here, we need to replace it again with the original
+ let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
original.token_trees.into()
} else {
SmallVec::from_const([original.into()])
}
} else {
+ // just a normal leaf
SmallVec::from_const([leaf.into()])
}
}
@@ -327,11 +358,15 @@ pub(crate) fn reverse_fixups(
#[cfg(test)]
mod tests {
+ use base_db::FileId;
use expect_test::{expect, Expect};
+ use triomphe::Arc;
- use crate::tt;
-
- use super::reverse_fixups;
+ use crate::{
+ fixup::reverse_fixups,
+ span::{RealSpanMap, SpanMap},
+ tt,
+ };
// The following three functions are only meant to check partial structural equivalence of
// `TokenTree`s, see the last assertion in `check()`.
@@ -361,13 +396,13 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture);
- let fixups = super::fixup_syntax(&parsed.syntax_node());
- let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
+ let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
+ let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(),
- fixups.token_map,
- fixups.next_id,
- fixups.replace,
+ span_map.as_ref(),
fixups.append,
+ fixups.remove,
);
let actual = format!("{tt}\n");
@@ -383,14 +418,15 @@ mod tests {
parse.syntax_node()
);
- reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+ reverse_fixups(&mut tt, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing.
- let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+ let original_as_tt =
+ mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
assert!(
check_subtree_eq(&tt, &original_as_tt),
- "different token tree: {tt:?},\n{original_as_tt:?}"
+ "different token tree:\n{tt:?}\n\n{original_as_tt:?}"
);
}
@@ -403,7 +439,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {for _ in __ra_fixup {}}
+fn foo () {for _ in __ra_fixup { }}
"#]],
)
}
@@ -431,7 +467,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {for bar in qux {}}
+fn foo () {for bar in qux { }}
"#]],
)
}
@@ -462,7 +498,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {match __ra_fixup {}}
+fn foo () {match __ra_fixup { }}
"#]],
)
}
@@ -494,7 +530,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {match __ra_fixup {}}
+fn foo () {match __ra_fixup { }}
"#]],
)
}
@@ -609,7 +645,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if a {}}
+fn foo () {if a { }}
"#]],
)
}
@@ -623,7 +659,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if __ra_fixup {}}
+fn foo () {if __ra_fixup { }}
"#]],
)
}
@@ -637,7 +673,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {if __ra_fixup {} {}}
+fn foo () {if __ra_fixup {} { }}
"#]],
)
}
@@ -651,7 +687,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {while __ra_fixup {}}
+fn foo () {while __ra_fixup { }}
"#]],
)
}
@@ -665,7 +701,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {while foo {}}
+fn foo () {while foo { }}
"#]],
)
}
@@ -692,7 +728,7 @@ fn foo() {
}
"#,
expect![[r#"
-fn foo () {loop {}}
+fn foo () {loop { }}
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index ca65db113..7b03709ac 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -2,252 +2,247 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
-use base_db::CrateId;
-use db::TokenExpander;
-use either::Either;
-use mbe::Origin;
-use syntax::{
- ast::{self, HasDocComments},
- AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
-};
-use triomphe::Arc;
-
-use crate::{
- db::{self, ExpandDatabase},
- fixup,
- name::{AsName, Name},
- HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
-};
-
-#[derive(Clone, Debug)]
-pub struct Hygiene {
- frames: Option<HygieneFrames>,
+use std::iter;
+
+use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
+
+use crate::db::ExpandDatabase;
+
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
+pub struct SyntaxContextData {
+ pub outer_expn: Option<MacroCallId>,
+ pub outer_transparency: Transparency,
+ pub parent: SyntaxContextId,
+ /// This context, but with all transparent and semi-transparent expansions filtered away.
+ pub opaque: SyntaxContextId,
+ /// This context, but with all transparent expansions filtered away.
+ pub opaque_and_semitransparent: SyntaxContextId,
}
-impl Hygiene {
- pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
- Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
+impl std::fmt::Debug for SyntaxContextData {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("SyntaxContextData")
+ .field("outer_expn", &self.outer_expn)
+ .field("outer_transparency", &self.outer_transparency)
+ .field("parent", &self.parent)
+ .field("opaque", &self.opaque)
+ .field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
+ .finish()
}
+}
- pub fn new_unhygienic() -> Hygiene {
- Hygiene { frames: None }
+impl SyntaxContextData {
+ pub fn root() -> Self {
+ SyntaxContextData {
+ outer_expn: None,
+ outer_transparency: Transparency::Opaque,
+ parent: SyntaxContextId::ROOT,
+ opaque: SyntaxContextId::ROOT,
+ opaque_and_semitransparent: SyntaxContextId::ROOT,
+ }
}
- // FIXME: this should just return name
- pub fn name_ref_to_name(
- &self,
+ pub fn fancy_debug(
+ self,
+ self_id: SyntaxContextId,
db: &dyn ExpandDatabase,
- name_ref: ast::NameRef,
- ) -> Either<Name, CrateId> {
- if let Some(frames) = &self.frames {
- if name_ref.text() == "$crate" {
- if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
- return Either::Right(krate);
- }
+ f: &mut std::fmt::Formatter<'_>,
+ ) -> std::fmt::Result {
+ write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
+ match self.outer_expn {
+ Some(id) => {
+ write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
}
+ None => write!(f, "root")?,
}
-
- Either::Left(name_ref.as_name())
+ write!(f, ", {:?})", self.outer_transparency)
}
+}
- pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
- let mut token = path.syntax().first_token()?.text_range();
- let frames = self.frames.as_ref()?;
- let mut current = &frames.0;
-
- loop {
- let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
- if origin == Origin::Def {
- return if current.local_inner {
- frames.root_crate(db, path.syntax())
- } else {
- None
- };
- }
- current = current.call_site.as_ref()?;
- token = mapped.value;
- }
- }
+/// A property of a macro expansion that determines how identifiers
+/// produced by that expansion are resolved.
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
+pub enum Transparency {
+ /// Identifier produced by a transparent expansion is always resolved at call-site.
+ /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
+ Transparent,
+ /// Identifier produced by a semi-transparent expansion may be resolved
+ /// either at call-site or at definition-site.
+ /// If it's a local variable, label or `$crate` then it's resolved at def-site.
+ /// Otherwise it's resolved at call-site.
+ /// `macro_rules` macros behave like this, built-in macros currently behave like this too,
+ /// but that's an implementation detail.
+ SemiTransparent,
+ /// Identifier produced by an opaque expansion is always resolved at definition-site.
+ /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
+ Opaque,
}
-#[derive(Clone, Debug)]
-struct HygieneFrames(Arc<HygieneFrame>);
+pub fn span_with_def_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
+}
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct HygieneFrame {
- expansion: Option<HygieneInfo>,
+pub fn span_with_call_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
+}
- // Indicate this is a local inner macro
- local_inner: bool,
- krate: Option<CrateId>,
+pub fn span_with_mixed_site_ctxt(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+) -> SpanData {
+ span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
+}
- call_site: Option<Arc<HygieneFrame>>,
- def_site: Option<Arc<HygieneFrame>>,
+fn span_with_ctxt_from_mark(
+ db: &dyn ExpandDatabase,
+ span: SpanData,
+ expn_id: MacroCallId,
+ transparency: Transparency,
+) -> SpanData {
+ SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
}
-impl HygieneFrames {
- fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
- // Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
- // usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
- HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
+pub(super) fn apply_mark(
+ db: &dyn ExpandDatabase,
+ ctxt: SyntaxContextId,
+ call_id: MacroCallId,
+ transparency: Transparency,
+) -> SyntaxContextId {
+ if transparency == Transparency::Opaque {
+ return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
- fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
- let mut token = node.first_token()?.text_range();
- let mut result = self.0.krate;
- let mut current = self.0.clone();
-
- while let Some((mapped, origin)) =
- current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
- {
- result = current.krate;
-
- let site = match origin {
- Origin::Def => &current.def_site,
- Origin::Call => &current.call_site,
- };
+ let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
+ let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
+ call_site_ctxt.normalize_to_macros_2_0(db)
+ } else {
+ call_site_ctxt.normalize_to_macro_rules(db)
+ };
- let site = match site {
- None => break,
- Some(it) => it,
- };
-
- current = site.clone();
- token = mapped.value;
- }
+ if call_site_ctxt.is_root() {
+ return apply_mark_internal(db, ctxt, Some(call_id), transparency);
+ }
- result
+ // Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
+ // macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
+ //
+ // In this case, the tokens from the macros 1.0 definition inherit the hygiene
+ // at their invocation. That is, we pretend that the macros 1.0 definition
+ // was defined at its invocation (i.e., inside the macros 2.0 definition)
+ // so that the macros 2.0 definition remains hygienic.
+ //
+ // See the example at `test/ui/hygiene/legacy_interaction.rs`.
+ for (call_id, transparency) in ctxt.marks(db) {
+ call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
}
+ apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
}
-#[derive(Debug, Clone, PartialEq, Eq)]
-struct HygieneInfo {
- file: MacroFile,
- /// The start offset of the `macro_rules!` arguments or attribute input.
- attr_input_or_mac_def_start: Option<InFile<TextSize>>,
+fn apply_mark_internal(
+ db: &dyn ExpandDatabase,
+ ctxt: SyntaxContextId,
+ call_id: Option<MacroCallId>,
+ transparency: Transparency,
+) -> SyntaxContextId {
+ let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
+ let mut opaque = syntax_context_data.opaque;
+ let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
+
+ if transparency >= Transparency::Opaque {
+ let parent = opaque;
+ let new_opaque = SyntaxContextId::SELF_REF;
+ // But we can't just grab the to be allocated ID either as that would not deduplicate
+ // things!
+ // So we need a new salsa store type here ...
+ opaque = db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque: new_opaque,
+ opaque_and_semitransparent: new_opaque,
+ });
+ }
+
+ if transparency >= Transparency::SemiTransparent {
+ let parent = opaque_and_semitransparent;
+ let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
+ opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque,
+ opaque_and_semitransparent: new_opaque_and_semitransparent,
+ });
+ }
- macro_def: TokenExpander,
- macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
- macro_arg_shift: mbe::Shift,
- exp_map: Arc<mbe::TokenMap>,
+ let parent = ctxt;
+ db.intern_syntax_context(SyntaxContextData {
+ outer_expn: call_id,
+ outer_transparency: transparency,
+ parent,
+ opaque,
+ opaque_and_semitransparent,
+ })
+}
+pub trait SyntaxContextExt {
+ fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
+ fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
+ fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
+ fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
+ fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
+ fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}
-impl HygieneInfo {
- fn map_ident_up(
- &self,
- db: &dyn ExpandDatabase,
- token: TextRange,
- ) -> Option<(InFile<TextRange>, Origin)> {
- let token_id = self.exp_map.token_by_range(token)?;
- let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
- let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
-
- let (token_map, tt) = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
- Some(unshifted) => {
- token_id = unshifted;
- (&attr_args.1, self.attr_input_or_mac_def_start?)
- }
- None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
- },
- _ => match origin {
- mbe::Origin::Call => {
- (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
- }
- mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
- (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
- (&expander.def_site_token_map, *tt)
- }
- _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
- },
- },
- };
-
- let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
- Some((tt.with_value(range + tt.value), origin))
+#[inline(always)]
+fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
+ match n {
+ SyntaxContextId::SELF_REF => p,
+ _ => n,
}
}
-fn make_hygiene_info(
- db: &dyn ExpandDatabase,
- macro_file: MacroFile,
- loc: &MacroCallLoc,
-) -> HygieneInfo {
- let def = loc.def.ast_id().left().and_then(|id| {
- let def_tt = match id.to_node(db) {
- ast::Macro::MacroRules(mac) => mac.token_tree()?,
- ast::Macro::MacroDef(mac) => mac.body()?,
- };
- Some(InFile::new(id.file_id, def_tt))
- });
- let attr_input_or_mac_def = def.or_else(|| match loc.kind {
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
- .token_tree()?;
- Some(InFile::new(ast_id.file_id, tt))
- }
- _ => None,
- });
-
- let macro_def = db.macro_expander(loc.def);
- let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
- let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
- Arc::new((
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
- Default::default(),
- Default::default(),
- ))
- });
-
- HygieneInfo {
- file: macro_file,
- attr_input_or_mac_def_start: attr_input_or_mac_def
- .map(|it| it.map(|tt| tt.syntax().text_range().start())),
- macro_arg_shift: mbe::Shift::new(&macro_arg.0),
- macro_arg,
- macro_def,
- exp_map,
+impl SyntaxContextExt for SyntaxContextId {
+ fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
+ handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
+ }
+ fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
+ handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
+ }
+ fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
+ db.lookup_intern_syntax_context(self).parent
+ }
+ fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
+ let data = db.lookup_intern_syntax_context(self);
+ (data.outer_expn, data.outer_transparency)
+ }
+ fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
+ let data = db.lookup_intern_syntax_context(*self);
+ *self = data.parent;
+ (data.outer_expn, data.outer_transparency)
+ }
+ fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
+ let mut marks = marks_rev(self, db).collect::<Vec<_>>();
+ marks.reverse();
+ marks
}
}
-impl HygieneFrame {
- pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
- let (info, krate, local_inner) = match file_id.macro_file() {
- None => (None, None, false),
- Some(macro_file) => {
- let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
- match loc.def.kind {
- MacroDefKind::Declarative(_) => {
- (info, Some(loc.def.krate), loc.def.local_inner)
- }
- MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
- MacroDefKind::BuiltInAttr(..) => (info, None, false),
- MacroDefKind::BuiltInDerive(..) => (info, None, false),
- MacroDefKind::BuiltInEager(..) => (info, None, false),
- MacroDefKind::ProcMacro(..) => (info, None, false),
- }
- }
- };
-
- let Some((info, calling_file)) = info else {
- return HygieneFrame {
- expansion: None,
- local_inner,
- krate,
- call_site: None,
- def_site: None,
- };
- };
-
- let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
- let call_site = Some(db.hygiene_frame(calling_file));
-
- HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
- }
+// FIXME: Make this a SyntaxContextExt method once we have RPIT
+pub fn marks_rev(
+ ctxt: SyntaxContextId,
+ db: &dyn ExpandDatabase,
+) -> impl Iterator<Item = (Option<MacroCallId>, Transparency)> + '_ {
+ iter::successors(Some(ctxt), move |&mark| {
+ Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT)
+ })
+ .map(|ctx| ctx.outer_mark(db))
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index 4be55126b..d7819b315 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -4,7 +4,7 @@
//! tree originates not from the text of some `FileId`, but from some macro
//! expansion.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod db;
pub mod ast_id_map;
@@ -18,39 +18,59 @@ pub mod quote;
pub mod eager;
pub mod mod_path;
pub mod attrs;
+pub mod span;
+pub mod files;
mod fixup;
-use mbe::TokenMap;
-pub use mbe::{Origin, ValueResult};
-
-use ::tt::token_id as tt;
+use attrs::collect_attrs;
use triomphe::Arc;
-use std::{fmt, hash::Hash, iter};
+use std::{fmt, hash::Hash};
use base_db::{
- impl_intern_key,
- salsa::{self, InternId},
+ span::{HirFileIdRepr, SpanData, SyntaxContextId},
CrateId, FileId, FileRange, ProcMacroKind,
};
use either::Either;
use syntax::{
- algo::{self, skip_trivia_token},
- ast::{self, AstNode, HasDocComments},
- AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+ ast::{self, AstNode},
+ SyntaxNode, SyntaxToken, TextRange, TextSize,
};
use crate::{
- ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId},
attrs::AttrId,
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::TokenExpander,
+ fixup::SyntaxFixupUndoInfo,
mod_path::ModPath,
proc_macro::ProcMacroExpander,
+ span::{ExpansionSpanMap, SpanMap},
};
+pub use crate::ast_id_map::{AstId, ErasedAstId, ErasedFileAstId};
+pub use crate::files::{InFile, InMacroFile, InRealFile};
+
+pub use base_db::span::{HirFileId, MacroCallId, MacroFileId};
+pub use mbe::ValueResult;
+
+pub type DeclarativeMacro = ::mbe::DeclarativeMacro<tt::SpanData>;
+
+pub mod tt {
+ pub use base_db::span::SpanData;
+ pub use tt::{DelimiterKind, Spacing, Span, SpanAnchor};
+
+ pub type Delimiter = ::tt::Delimiter<SpanData>;
+ pub type DelimSpan = ::tt::DelimSpan<SpanData>;
+ pub type Subtree = ::tt::Subtree<SpanData>;
+ pub type Leaf = ::tt::Leaf<SpanData>;
+ pub type Literal = ::tt::Literal<SpanData>;
+ pub type Punct = ::tt::Punct<SpanData>;
+ pub type Ident = ::tt::Ident<SpanData>;
+ pub type TokenTree = ::tt::TokenTree<SpanData>;
+}
+
pub type ExpandResult<T> = ValueResult<T, ExpandError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
@@ -59,6 +79,7 @@ pub enum ExpandError {
Mbe(mbe::ExpandError),
RecursionOverflowPoisoned,
Other(Box<Box<str>>),
+ ProcMacroPanic(Box<Box<str>>),
}
impl ExpandError {
@@ -81,56 +102,24 @@ impl fmt::Display for ExpandError {
ExpandError::RecursionOverflowPoisoned => {
f.write_str("overflow expanding the original macro")
}
+ ExpandError::ProcMacroPanic(it) => {
+ f.write_str("proc-macro panicked: ")?;
+ f.write_str(it)
+ }
ExpandError::Other(it) => f.write_str(it),
}
}
}
-/// Input to the analyzer is a set of files, where each file is identified by
-/// `FileId` and contains source code. However, another source of source code in
-/// Rust are macros: each macro can be thought of as producing a "temporary
-/// file". To assign an id to such a file, we use the id of the macro call that
-/// produced the file. So, a `HirFileId` is either a `FileId` (source code
-/// written by user), or a `MacroCallId` (source code produced by macro).
-///
-/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
-/// containing the call plus the offset of the macro call in the file. Note that
-/// this is a recursive definition! However, the size_of of `HirFileId` is
-/// finite (because everything bottoms out at the real `FileId`) and small
-/// (`MacroCallId` uses the location interning. You can check details here:
-/// <https://en.wikipedia.org/wiki/String_interning>).
-///
-/// The two variants are encoded in a single u32 which are differentiated by the MSB.
-/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
-/// `MacroCallId`.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct HirFileId(u32);
-
-impl fmt::Debug for HirFileId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.repr().fmt(f)
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroFile {
- pub macro_call_id: MacroCallId,
-}
-
-/// `MacroCallId` identifies a particular macro invocation, like
-/// `println!("Hello, {}", world)`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct MacroCallId(salsa::InternId);
-impl_intern_key!(MacroCallId);
-
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub def: MacroDefId,
- pub(crate) krate: CrateId,
+ pub krate: CrateId,
/// Some if this is a macro call for an eager macro. Note that this is `None`
/// for the eager input macro file.
- eager: Option<Box<EagerCallInfo>>,
+ eager: Option<Arc<EagerCallInfo>>,
pub kind: MacroCallKind,
+ pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -139,6 +128,7 @@ pub struct MacroDefId {
pub kind: MacroDefKind,
pub local_inner: bool,
pub allow_internal_unsafe: bool,
+ // pub def_site: SyntaxContextId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -152,9 +142,9 @@ pub enum MacroDefKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-struct EagerCallInfo {
+pub struct EagerCallInfo {
/// The expanded argument of the eager macro.
- arg: Arc<(tt::Subtree, TokenMap)>,
+ arg: Arc<tt::Subtree>,
/// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
arg_id: MacroCallId,
error: Option<ExpandError>,
@@ -178,7 +168,7 @@ pub enum MacroCallKind {
},
Attr {
ast_id: AstId<ast::Item>,
- attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
+ attr_args: Option<Arc<tt::Subtree>>,
/// Syntactical index of the invoking `#[attribute]`.
///
/// Outer attributes are counted first, then inner attributes. This does not support
@@ -187,76 +177,68 @@ pub enum MacroCallKind {
},
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-enum HirFileIdRepr {
- FileId(FileId),
- MacroFile(MacroFile),
-}
+pub trait HirFileIdExt {
+ /// Returns the original file of this macro call hierarchy.
+ fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId;
-impl From<FileId> for HirFileId {
- fn from(FileId(id): FileId) -> Self {
- assert!(id < Self::MAX_FILE_ID);
- HirFileId(id)
- }
-}
+ /// Returns the original file of this macro call hierarchy while going into the included file if
+ /// one of the calls comes from an `include!``.
+ fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId;
-impl From<MacroFile> for HirFileId {
- fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self {
- let id = id.as_u32();
- assert!(id < Self::MAX_FILE_ID);
- HirFileId(id | Self::MACRO_FILE_TAG_MASK)
- }
-}
+ /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
+ fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
+
+ /// Return expansion information if it is a macro-expansion file
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo>;
-impl HirFileId {
- const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
- const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
+ fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase)
+ -> Option<InFile<ast::Attr>>;
+}
- /// For macro-expansion files, returns the file original source file the
- /// expansion originated from.
- pub fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
+impl HirFileIdExt for HirFileId {
+ fn original_file(self, db: &dyn db::ExpandDatabase) -> FileId {
let mut file_id = self;
loop {
match file_id.repr() {
HirFileIdRepr::FileId(id) => break id,
- HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
- let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
- file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
- Some(Ok((_, file))) => file.into(),
- _ => loc.kind.file_id(),
- }
+ HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
+ file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id();
}
}
}
}
- pub fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
- let mut level = 0;
- let mut curr = self;
- while let Some(macro_file) = curr.macro_file() {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-
- level += 1;
- curr = loc.kind.file_id();
+ fn original_file_respecting_includes(mut self, db: &dyn db::ExpandDatabase) -> FileId {
+ loop {
+ match self.repr() {
+ base_db::span::HirFileIdRepr::FileId(id) => break id,
+ base_db::span::HirFileIdRepr::MacroFile(file) => {
+ let loc = db.lookup_intern_macro_call(file.macro_call_id);
+ if loc.def.is_include() {
+ if let Some(eager) = &loc.eager {
+ if let Ok(it) = builtin_fn_macro::include_input_to_file_id(
+ db,
+ file.macro_call_id,
+ &eager.arg,
+ ) {
+ break it;
+ }
+ }
+ }
+ self = loc.kind.file_id();
+ }
+ }
}
- level
}
- /// If this is a macro call, returns the syntax node of the call.
- pub fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
- let macro_file = self.macro_file()?;
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- Some(loc.to_node(db))
- }
-
- /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
- pub fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<(FileId, SyntaxNode)> {
+ fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
loop {
match call.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)),
- HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ HirFileIdRepr::FileId(file_id) => {
+ break Some(InRealFile { file_id, value: call.value })
+ }
+ HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
call = db.lookup_intern_macro_call(macro_call_id).to_node(db);
}
}
@@ -264,12 +246,11 @@ impl HirFileId {
}
/// Return expansion information if it is a macro-expansion file
- pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
- let macro_file = self.macro_file()?;
- ExpansionInfo::new(db, macro_file)
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
+ Some(ExpansionInfo::new(db, self.macro_file()?))
}
- pub fn as_builtin_derive_attr_node(
+ fn as_builtin_derive_attr_node(
&self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<ast::Attr>> {
@@ -281,104 +262,84 @@ impl HirFileId {
};
Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
}
+}
- pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- matches!(
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
- MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
- )
- }
- None => false,
- }
- }
+pub trait MacroFileIdExt {
+ fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32;
+ /// If this is a macro call, returns the syntax node of the call.
+ fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode>;
- pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- matches!(
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
- MacroDefKind::BuiltInDerive(..)
- )
- }
- None => false,
- }
- }
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo;
+
+ fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
+ fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
/// Return whether this file is an include macro
- pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
- }
- _ => false,
- }
+ fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+ fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool;
+ /// Return whether this file is an attr macro
+ fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool;
+
+ /// Return whether this file is the pseudo expansion of the derive attribute.
+ /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+ fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool;
+}
+
+impl MacroFileIdExt for MacroFileId {
+ fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
+ db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
}
+ fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
+ let mut level = 0;
+ let mut macro_file = self;
+ loop {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- pub fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
- }
- _ => false,
+ level += 1;
+ macro_file = match loc.kind.file_id().repr() {
+ HirFileIdRepr::FileId(_) => break level,
+ HirFileIdRepr::MacroFile(it) => it,
+ };
}
}
- /// Return whether this file is an attr macro
- pub fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.kind, MacroCallKind::Attr { .. })
- }
- _ => false,
- }
+ /// Return expansion information if it is a macro-expansion file
+ fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo {
+ ExpansionInfo::new(db, self)
}
- /// Return whether this file is the pseudo expansion of the derive attribute.
- /// See [`crate::builtin_attr_macro::derive_attr_expand`].
- pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
- match self.macro_file() {
- Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- loc.def.is_attribute_derive()
- }
- None => false,
- }
+ fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+ matches!(
+ db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+ MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ )
}
- #[inline]
- pub fn is_macro(self) -> bool {
- self.0 & Self::MACRO_FILE_TAG_MASK != 0
+ fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+ matches!(
+ db.lookup_intern_macro_call(self.macro_call_id).def.kind,
+ MacroDefKind::BuiltInDerive(..)
+ )
}
- #[inline]
- pub fn macro_file(self) -> Option<MacroFile> {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => None,
- _ => Some(MacroFile {
- macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
- }),
- }
+ fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
+ db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
}
- #[inline]
- pub fn file_id(self) -> Option<FileId> {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => Some(FileId(self.0)),
- _ => None,
- }
+ fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
- fn repr(self) -> HirFileIdRepr {
- match self.0 & Self::MACRO_FILE_TAG_MASK {
- 0 => HirFileIdRepr::FileId(FileId(self.0)),
- _ => HirFileIdRepr::MacroFile(MacroFile {
- macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
- }),
- }
+ fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { .. })
+ }
+
+ fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+ loc.def.is_attribute_derive()
}
}
@@ -388,20 +349,35 @@ impl MacroDefId {
db: &dyn db::ExpandDatabase,
krate: CrateId,
kind: MacroCallKind,
+ call_site: SyntaxContextId,
) -> MacroCallId {
- db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind })
+ db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site })
+ }
+
+ pub fn definition_range(&self, db: &dyn db::ExpandDatabase) -> InFile<TextRange> {
+ match self.kind {
+ MacroDefKind::Declarative(id)
+ | MacroDefKind::BuiltIn(_, id)
+ | MacroDefKind::BuiltInAttr(_, id)
+ | MacroDefKind::BuiltInDerive(_, id)
+ | MacroDefKind::BuiltInEager(_, id) => {
+ id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
+ }
+ MacroDefKind::ProcMacro(_, _, id) => {
+ id.with_value(db.ast_id_map(id.file_id).get(id.value).text_range())
+ }
+ }
}
pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
- let id = match self.kind {
+ match self.kind {
MacroDefKind::ProcMacro(.., id) => return Either::Right(id),
MacroDefKind::Declarative(id)
| MacroDefKind::BuiltIn(_, id)
| MacroDefKind::BuiltInAttr(_, id)
| MacroDefKind::BuiltInDerive(_, id)
- | MacroDefKind::BuiltInEager(_, id) => id,
- };
- Either::Left(id)
+ | MacroDefKind::BuiltInEager(_, id) => Either::Left(id),
+ }
}
pub fn is_proc_macro(&self) -> bool {
@@ -443,6 +419,18 @@ impl MacroDefId {
}
impl MacroCallLoc {
+ pub fn span(&self, db: &dyn db::ExpandDatabase) -> SpanData {
+ let ast_id = self.kind.erased_ast_id();
+ let file_id = self.kind.file_id();
+ let range = db.ast_id_map(file_id).get_erased(ast_id).text_range();
+ match file_id.repr() {
+ HirFileIdRepr::FileId(file_id) => db.real_span_map(file_id).span_for_range(range),
+ HirFileIdRepr::MacroFile(m) => {
+ db.parse_macro_expansion(m).value.1.span_at(range.start())
+ }
+ }
+ }
+
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
match self.kind {
MacroCallKind::FnLike { ast_id, .. } => {
@@ -451,9 +439,9 @@ impl MacroCallLoc {
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
+ collect_attrs(&it)
.nth(derive_attr_index.ast_index())
- .and_then(|it| match it {
+ .and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
@@ -464,9 +452,9 @@ impl MacroCallLoc {
if self.def.is_attribute_derive() {
// FIXME: handle `cfg_attr`
ast_id.with_value(ast_id.to_node(db)).map(|it| {
- it.doc_comments_and_attrs()
+ collect_attrs(&it)
.nth(invoc_attr_index.ast_index())
- .and_then(|it| match it {
+ .and_then(|it| match it.1 {
Either::Left(attr) => Some(attr.syntax().clone()),
Either::Right(_) => None,
})
@@ -483,20 +471,26 @@ impl MacroCallLoc {
match self.kind {
MacroCallKind::FnLike { expand_to, .. } => expand_to,
MacroCallKind::Derive { .. } => ExpandTo::Items,
- MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Statements,
+ MacroCallKind::Attr { .. } if self.def.is_attribute_derive() => ExpandTo::Items,
MacroCallKind::Attr { .. } => {
- // is this always correct?
+ // FIXME(stmt_expr_attributes)
ExpandTo::Items
}
}
}
}
-// FIXME: attribute indices do not account for nested `cfg_attr`
-
impl MacroCallKind {
+ fn descr(&self) -> &'static str {
+ match self {
+ MacroCallKind::FnLike { .. } => "macro call",
+ MacroCallKind::Derive { .. } => "derive macro",
+ MacroCallKind::Attr { .. } => "attribute macro",
+ }
+ }
+
/// Returns the file containing the macro invocation.
- fn file_id(&self) -> HirFileId {
+ pub fn file_id(&self) -> HirFileId {
match *self {
MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. }
| MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. }
@@ -504,6 +498,14 @@ impl MacroCallKind {
}
}
+ fn erased_ast_id(&self) -> ErasedFileAstId {
+ match *self {
+ MacroCallKind::FnLike { ast_id: InFile { value, .. }, .. } => value.erase(),
+ MacroCallKind::Derive { ast_id: InFile { value, .. }, .. } => value.erase(),
+ MacroCallKind::Attr { ast_id: InFile { value, .. }, .. } => value.erase(),
+ }
+ }
+
/// Returns the original file range that best describes the location of this macro call.
///
/// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
@@ -548,242 +550,179 @@ impl MacroCallKind {
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
// FIXME: should be the range of the macro name, not the whole derive
// FIXME: handle `cfg_attr`
- ast_id
- .to_node(db)
- .doc_comments_and_attrs()
+ collect_attrs(&ast_id.to_node(db))
.nth(derive_attr_index.ast_index())
.expect("missing derive")
+ .1
.expect_left("derive is a doc comment?")
.syntax()
.text_range()
}
// FIXME: handle `cfg_attr`
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .expect("missing attribute")
- .expect_left("attribute macro is a doc comment?")
- .syntax()
- .text_range(),
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ collect_attrs(&ast_id.to_node(db))
+ .nth(invoc_attr_index.ast_index())
+ .expect("missing attribute")
+ .1
+ .expect_left("attribute macro is a doc comment?")
+ .syntax()
+ .text_range()
+ }
};
FileRange { range, file_id }
}
- fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
+ fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile<Option<SyntaxNode>> {
match self {
- MacroCallKind::FnLike { ast_id, .. } => ast_id
- .to_in_file_node(db)
- .map(|it| Some(it.token_tree()?.syntax().clone()))
- .transpose(),
+ MacroCallKind::FnLike { ast_id, .. } => {
+ ast_id.to_in_file_node(db).map(|it| Some(it.token_tree()?.syntax().clone()))
+ }
MacroCallKind::Derive { ast_id, .. } => {
- Some(ast_id.to_in_file_node(db).syntax().cloned())
+ ast_id.to_in_file_node(db).syntax().cloned().map(Some)
}
MacroCallKind::Attr { ast_id, .. } => {
- Some(ast_id.to_in_file_node(db).syntax().cloned())
+ ast_id.to_in_file_node(db).syntax().cloned().map(Some)
}
}
}
}
-impl MacroCallId {
- pub fn as_file(self) -> HirFileId {
- MacroFile { macro_call_id: self }.into()
- }
-
- pub fn as_macro_file(self) -> MacroFile {
- MacroFile { macro_call_id: self }
- }
-}
-
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
+// FIXME: can be expensive to create, we should check the use sites and maybe replace them with
+// simpler function calls if the map is only used once
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
- expanded: InMacroFile<SyntaxNode>,
+ pub expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes
- arg: InFile<SyntaxNode>,
+ arg: InFile<Option<SyntaxNode>>,
/// The `macro_rules!` or attribute input.
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
macro_def: TokenExpander,
- macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
- /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
- /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
- macro_arg_shift: mbe::Shift,
- exp_map: Arc<mbe::TokenMap>,
+ macro_arg: Arc<tt::Subtree>,
+ pub exp_map: Arc<ExpansionSpanMap>,
+ arg_map: SpanMap,
}
impl ExpansionInfo {
- pub fn expanded(&self) -> InFile<SyntaxNode> {
- self.expanded.clone().into()
+ pub fn expanded(&self) -> InMacroFile<SyntaxNode> {
+ self.expanded.clone()
}
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
- Some(self.arg.with_value(self.arg.value.parent()?))
+ Some(self.arg.with_value(self.arg.value.as_ref()?.parent()?))
}
- /// Map a token down from macro input into the macro expansion.
- ///
- /// The inner workings of this function differ slightly depending on the type of macro we are dealing with:
- /// - declarative:
- /// For declarative macros, we need to accommodate for the macro definition site(which acts as a second unchanging input)
- /// , as tokens can mapped in and out of it.
- /// To do this we shift all ids in the expansion by the maximum id of the definition site giving us an easy
- /// way to map all the tokens.
- /// - attribute:
- /// Attributes have two different inputs, the input tokentree in the attribute node and the item
- /// the attribute is annotating. Similarly as for declarative macros we need to do a shift here
- /// as well. Currently this is done by shifting the attribute input by the maximum id of the item.
- /// - function-like and derives:
- /// Both of these only have one simple call site input so no special handling is required here.
- pub fn map_token_down(
- &self,
- db: &dyn db::ExpandDatabase,
- item: Option<ast::Item>,
- token: InFile<&SyntaxToken>,
- // FIXME: use this for range mapping, so that we can resolve inline format args
- _relative_token_offset: Option<TextSize>,
- ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
- assert_eq!(token.file_id, self.arg.file_id);
- let token_id_in_attr_input = if let Some(item) = item {
- // check if we are mapping down in an attribute input
- // this is a special case as attributes can have two inputs
- let call_id = self.expanded.file_id.macro_call_id;
- let loc = db.lookup_intern_macro_call(call_id);
-
- let token_range = token.value.text_range();
- match &loc.kind {
- MacroCallKind::Attr { attr_args, invoc_attr_index, .. } => {
- // FIXME: handle `cfg_attr`
- let attr = item
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?;
- match attr.token_tree() {
- Some(token_tree)
- if token_tree.syntax().text_range().contains_range(token_range) =>
- {
- let attr_input_start =
- token_tree.left_delimiter_token()?.text_range().start();
- let relative_range =
- token.value.text_range().checked_sub(attr_input_start)?;
- // shift by the item's tree's max id
- let token_id = attr_args.1.token_by_range(relative_range)?;
-
- let token_id = if loc.def.is_attribute_derive() {
- // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
- token_id
- } else {
- self.macro_arg_shift.shift(token_id)
- };
- Some(token_id)
- }
- _ => None,
- }
- }
- _ => None,
- }
- } else {
- None
- };
-
- let token_id = match token_id_in_attr_input {
- Some(token_id) => token_id,
- // the token is not inside `an attribute's input so do the lookup in the macro_arg as usual
- None => {
- let relative_range =
- token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
- let token_id = self.macro_arg.1.token_by_range(relative_range)?;
- // conditionally shift the id by a declarative macro definition
- self.macro_def.map_id_down(token_id)
- }
- };
-
+ /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
+ pub fn map_range_down<'a>(
+ &'a self,
+ span: SpanData,
+ ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + 'a>> {
let tokens = self
.exp_map
- .ranges_by_token(token_id, token.value.kind())
+ .ranges_with_span(span)
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());
- Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
+ Some(InMacroFile::new(self.expanded.file_id, tokens))
}
- /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
- pub fn map_token_up(
+ /// Looks up the span at the given offset.
+ pub fn span_for_offset(
+ &self,
+ db: &dyn db::ExpandDatabase,
+ offset: TextSize,
+ ) -> (FileRange, SyntaxContextId) {
+ debug_assert!(self.expanded.value.text_range().contains(offset));
+ let span = self.exp_map.span_at(offset);
+ let anchor_offset = db
+ .ast_id_map(span.anchor.file_id.into())
+ .get_erased(span.anchor.ast_id)
+ .text_range()
+ .start();
+ (FileRange { file_id: span.anchor.file_id, range: span.range + anchor_offset }, span.ctx)
+ }
+
+ /// Maps up the text range out of the expansion hierarchy back into the original file its from.
+ pub fn map_node_range_up(
&self,
db: &dyn db::ExpandDatabase,
- token: InFile<&SyntaxToken>,
- ) -> Option<(InFile<SyntaxToken>, Origin)> {
- assert_eq!(token.file_id, self.expanded.file_id.into());
- // Fetch the id through its text range,
- let token_id = self.exp_map.token_by_range(token.value.text_range())?;
- // conditionally unshifting the id to accommodate for macro-rules def site
- let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
-
- let call_id = self.expanded.file_id.macro_call_id;
- let loc = db.lookup_intern_macro_call(call_id);
-
- // Special case: map tokens from `include!` expansions to the included file
- if loc.def.is_include() {
- if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
- let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
- let source = db.parse(file_id);
-
- let token = source.syntax_node().covering_element(range).into_token()?;
-
- return Some((InFile::new(file_id.into(), token), Origin::Call));
+ range: TextRange,
+ ) -> Option<(FileRange, SyntaxContextId)> {
+ debug_assert!(self.expanded.value.text_range().contains_range(range));
+ let mut spans = self.exp_map.spans_for_range(range);
+ let SpanData { range, anchor, ctx } = spans.next()?;
+ let mut start = range.start();
+ let mut end = range.end();
+
+ for span in spans {
+ if span.anchor != anchor || span.ctx != ctx {
+ return None;
}
+ start = start.min(span.range.start());
+ end = end.max(span.range.end());
}
-
- // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
- let (token_map, tt) = match &loc.kind {
- MacroCallKind::Attr { attr_args, .. } => {
- if loc.def.is_attribute_derive() {
- (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
- } else {
- // try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input
- // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
- match self.macro_arg_shift.unshift(token_id) {
- Some(unshifted) => {
- token_id = unshifted;
- (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
- }
- None => (&self.macro_arg.1, self.arg.clone()),
- }
- }
- }
- _ => match origin {
- mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
- mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
- (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
- (&expander.def_site_token_map, tt.syntax().cloned())
- }
- _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
- },
+ let anchor_offset =
+ db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start();
+ Some((
+ FileRange {
+ file_id: anchor.file_id,
+ range: TextRange::new(start, end) + anchor_offset,
},
- };
+ ctx,
+ ))
+ }
- let range = token_map.first_range_by_token(token_id, token.value.kind())?;
- let token =
- tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
- Some((tt.with_value(token), origin))
+ /// Maps up the text range out of the expansion into is macro call.
+ pub fn map_range_up_once(
+ &self,
+ db: &dyn db::ExpandDatabase,
+ token: TextRange,
+ ) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
+ debug_assert!(self.expanded.value.text_range().contains_range(token));
+ let span = self.exp_map.span_at(token.start());
+ match &self.arg_map {
+ SpanMap::RealSpanMap(_) => {
+ let file_id = span.anchor.file_id.into();
+ let anchor_offset =
+ db.ast_id_map(file_id).get_erased(span.anchor.ast_id).text_range().start();
+ InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
+ }
+ SpanMap::ExpansionSpanMap(arg_map) => {
+ let arg_range = self
+ .arg
+ .value
+ .as_ref()
+ .map_or_else(|| TextRange::empty(TextSize::from(0)), |it| it.text_range());
+ InFile::new(
+ self.arg.file_id,
+ arg_map
+ .ranges_with_span(span)
+ .filter(|range| range.intersect(arg_range).is_some())
+ .collect(),
+ )
+ }
+ }
}
- fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
+ pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let arg_tt = loc.kind.arg(db)?;
+ let arg_tt = loc.kind.arg(db);
+ let arg_map = db.span_map(arg_tt.file_id);
let macro_def = db.macro_expander(loc.def);
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
- let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
- Arc::new((
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
- Default::default(),
- Default::default(),
- ))
+ let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+ (
+ Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: Vec::new(),
+ }),
+ SyntaxFixupUndoInfo::NONE,
+ )
});
let def = loc.def.ast_id().left().and_then(|id| {
@@ -799,342 +738,27 @@ impl ExpansionInfo {
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
// FIXME: handle `cfg_attr`
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
+ let tt = collect_attrs(&ast_id.to_node(db))
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
+ .and_then(|x| Either::left(x.1))?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
- Some(ExpansionInfo {
+ ExpansionInfo {
expanded,
arg: arg_tt,
attr_input_or_mac_def,
- macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
- })
- }
-}
-
-/// `AstId` points to an AST node in any file.
-///
-/// It is stable across reparses, and can be used as salsa key/value.
-pub type AstId<N> = InFile<FileAstId<N>>;
-
-impl<N: AstIdNode> AstId<N> {
- pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
- self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
- }
- pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> {
- InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
- }
- pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
- db.ast_id_map(self.file_id).get(self.value)
- }
-}
-
-pub type ErasedAstId = InFile<ErasedFileAstId>;
-
-impl ErasedAstId {
- pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
- db.ast_id_map(self.file_id).get_raw(self.value)
- }
-}
-
-/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
-///
-/// Typical usages are:
-///
-/// * `InFile<SyntaxNode>` -- syntax node in a file
-/// * `InFile<ast::FnDef>` -- ast node in a file
-/// * `InFile<TextSize>` -- offset in a file
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct InFile<T> {
- pub file_id: HirFileId,
- pub value: T,
-}
-
-impl<T> InFile<T> {
- pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
- InFile { file_id, value }
- }
-
- pub fn with_value<U>(&self, value: U) -> InFile<U> {
- InFile::new(self.file_id, value)
- }
-
- pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
- InFile::new(self.file_id, f(self.value))
- }
-
- pub fn as_ref(&self) -> InFile<&T> {
- self.with_value(&self.value)
- }
-
- pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
- db.parse_or_expand(self.file_id)
- }
-}
-
-impl<T: Clone> InFile<&T> {
- pub fn cloned(&self) -> InFile<T> {
- self.with_value(self.value.clone())
- }
-}
-
-impl<T> InFile<Option<T>> {
- pub fn transpose(self) -> Option<InFile<T>> {
- let value = self.value?;
- Some(InFile::new(self.file_id, value))
- }
-}
-
-impl<L, R> InFile<Either<L, R>> {
- pub fn transpose(self) -> Either<InFile<L>, InFile<R>> {
- match self.value {
- Either::Left(l) => Either::Left(InFile::new(self.file_id, l)),
- Either::Right(r) => Either::Right(InFile::new(self.file_id, r)),
+ arg_map,
}
}
}
-impl InFile<&SyntaxNode> {
- pub fn ancestors_with_macros(
- self,
- db: &dyn db::ExpandDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
- iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
- Some(parent) => Some(node.with_value(parent)),
- None => node.file_id.call_node(db),
- })
- }
-
- /// Skips the attributed item that caused the macro invocation we are climbing up
- pub fn ancestors_with_macros_skip_attr_item(
- self,
- db: &dyn db::ExpandDatabase,
- ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
- let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
- Some(parent) => Some(node.with_value(parent)),
- None => {
- let parent_node = node.file_id.call_node(db)?;
- if node.file_id.is_attr_macro(db) {
- // macro call was an attributed item, skip it
- // FIXME: does this fail if this is a direct expansion of another macro?
- parent_node.map(|node| node.parent()).transpose()
- } else {
- Some(parent_node)
- }
- }
- };
- iter::successors(succ(&self.cloned()), succ)
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- ///
- /// For attributes and derives, this will point back to the attribute only.
- /// For the entire item use [`InFile::original_file_range_full`].
- pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range(db)
- }
- }
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range_with_body(db)
- }
- }
- }
-
- /// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
- match ascend_node_border_tokens(db, self) {
- Some(InFile { file_id, value: (first, last) }) => {
- let original_file = file_id.original_file(db);
- let range = first.text_range().cover(last.text_range());
- if file_id != original_file.into() {
- tracing::error!("Failed mapping up more for {:?}", range);
- return None;
- }
- Some(FileRange { file_id: original_file, range })
- }
- _ if !self.file_id.is_macro() => Some(FileRange {
- file_id: self.file_id.original_file(db),
- range: self.value.text_range(),
- }),
- _ => None,
- }
- }
-
- pub fn original_syntax_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
- // This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
- if !self.file_id.is_macro() {
- return Some(self.map(Clone::clone));
- } else if !self.file_id.is_attr_macro(db) {
- return None;
- }
-
- if let Some(InFile { file_id, value: (first, last) }) = ascend_node_border_tokens(db, self)
- {
- if file_id.is_macro() {
- let range = first.text_range().cover(last.text_range());
- tracing::error!("Failed mapping out of macro file for {:?}", range);
- return None;
- }
- // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
- let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
- let kind = self.value.kind();
- let value = anc.ancestors().find(|it| it.kind() == kind)?;
- return Some(InFile::new(file_id, value));
- }
- None
- }
-}
-
-impl InFile<SyntaxToken> {
- pub fn upmap(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxToken>> {
- let expansion = self.file_id.expansion_info(db)?;
- expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
- }
-
- /// Falls back to the macro call range if the node cannot be mapped up fully.
- pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
- HirFileIdRepr::MacroFile(mac_file) => {
- if let Some(res) = self.original_file_range_opt(db) {
- return res;
- }
- // Fall back to whole macro call.
- let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
- loc.kind.original_call_range(db)
- }
- }
- }
-
- /// Attempts to map the syntax node back up its macro calls.
- pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
- match self.file_id.repr() {
- HirFileIdRepr::FileId(file_id) => {
- Some(FileRange { file_id, range: self.value.text_range() })
- }
- HirFileIdRepr::MacroFile(_) => {
- let expansion = self.file_id.expansion_info(db)?;
- let InFile { file_id, value } = ascend_call_token(db, &expansion, self)?;
- let original_file = file_id.original_file(db);
- if file_id != original_file.into() {
- return None;
- }
- Some(FileRange { file_id: original_file, range: value.text_range() })
- }
- }
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-pub struct InMacroFile<T> {
- pub file_id: MacroFile,
- pub value: T,
-}
-
-impl<T> From<InMacroFile<T>> for InFile<T> {
- fn from(macro_file: InMacroFile<T>) -> Self {
- InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
- }
-}
-
-fn ascend_node_border_tokens(
- db: &dyn db::ExpandDatabase,
- InFile { file_id, value: node }: InFile<&SyntaxNode>,
-) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
- let expansion = file_id.expansion_info(db)?;
-
- let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next);
- let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev);
-
- // FIXME: Once the token map rewrite is done, this shouldnt need to rely on syntax nodes and tokens anymore
- let first = first_token(node)?;
- let last = last_token(node)?;
- let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
- let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?;
- (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value)))
-}
-
-fn ascend_call_token(
- db: &dyn db::ExpandDatabase,
- expansion: &ExpansionInfo,
- token: InFile<SyntaxToken>,
-) -> Option<InFile<SyntaxToken>> {
- let mut mapping = expansion.map_token_up(db, token.as_ref())?;
- while let (mapped, Origin::Call) = mapping {
- match mapped.file_id.expansion_info(db) {
- Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
- None => return Some(mapped),
- }
- }
- None
-}
-
-impl<N: AstNode> InFile<N> {
- pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
- self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
- }
-
- // FIXME: this should return `Option<InFileNotHirFile<N>>`
- pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<N>> {
- // This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
- if !self.file_id.is_macro() {
- return Some(self);
- } else if !self.file_id.is_attr_macro(db) {
- return None;
- }
-
- if let Some(InFile { file_id, value: (first, last) }) =
- ascend_node_border_tokens(db, self.syntax())
- {
- if file_id.is_macro() {
- let range = first.text_range().cover(last.text_range());
- tracing::error!("Failed mapping out of macro file for {:?}", range);
- return None;
- }
- // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
- let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
- let value = anc.ancestors().find_map(N::cast)?;
- return Some(InFile::new(file_id, value));
- }
- None
- }
-
- pub fn syntax(&self) -> InFile<&SyntaxNode> {
- self.with_value(self.value.syntax())
- }
-}
-
/// In Rust, macros expand token trees to token trees. When we want to turn a
/// token tree into an AST node, we need to figure out what kind of AST node we
/// want: something like `foo` can be a type, an expression, or a pattern.
@@ -1199,9 +823,4 @@ impl ExpandTo {
}
}
-#[derive(Debug)]
-pub struct UnresolvedMacro {
- pub path: ModPath,
-}
-
intern::impl_internable!(ModPath, attrs::AttrInput);
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index 69aa09c4a..9534b5039 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -7,11 +7,11 @@ use std::{
use crate::{
db::ExpandDatabase,
- hygiene::Hygiene,
- name::{known, Name},
+ hygiene::{marks_rev, SyntaxContextExt, Transparency},
+ name::{known, AsName, Name},
+ span::SpanMapRef,
};
-use base_db::CrateId;
-use either::Either;
+use base_db::{span::SyntaxContextId, CrateId};
use smallvec::SmallVec;
use syntax::{ast, AstNode};
@@ -38,6 +38,7 @@ pub enum PathKind {
Crate,
/// Absolute path (::foo)
Abs,
+ // FIXME: Remove this
/// `$crate` from macro expansion
DollarCrate(CrateId),
}
@@ -46,9 +47,9 @@ impl ModPath {
pub fn from_src(
db: &dyn ExpandDatabase,
path: ast::Path,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
- convert_path(db, None, path, hygiene)
+ convert_path(db, None, path, span_map)
}
pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
@@ -193,33 +194,36 @@ fn convert_path(
db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path,
- hygiene: &Hygiene,
+ span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
let prefix = match path.qualifier() {
- Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
+ Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
None => prefix,
};
let segment = path.segment()?;
let mut mod_path = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
- match hygiene.name_ref_to_name(db, name_ref) {
- Either::Left(name) => {
- // no type args in use
- let mut res = prefix.unwrap_or_else(|| {
- ModPath::from_kind(
- segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
- )
- });
- res.segments.push(name);
- res
- }
- Either::Right(crate_id) => {
- return Some(ModPath::from_segments(
- PathKind::DollarCrate(crate_id),
- iter::empty(),
- ))
+ if name_ref.text() == "$crate" {
+ if prefix.is_some() {
+ return None;
}
+ ModPath::from_kind(
+ resolve_crate_root(
+ db,
+ span_map.span_for_range(name_ref.syntax().text_range()).ctx,
+ )
+ .map(PathKind::DollarCrate)
+ .unwrap_or(PathKind::Crate),
+ )
+ } else {
+ let mut res = prefix.unwrap_or_else(|| {
+ ModPath::from_kind(
+ segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+ )
+ });
+ res.segments.push(name_ref.as_name());
+ res
}
}
ast::PathSegmentKind::SelfTypeKw => {
@@ -261,8 +265,14 @@ fn convert_path(
// We follow what it did anyway :)
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
- if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
- mod_path.kind = PathKind::DollarCrate(crate_id);
+ let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx;
+ if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
+ if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+ mod_path.kind = match resolve_crate_root(db, syn_ctx) {
+ Some(crate_root) => PathKind::DollarCrate(crate_root),
+ None => PathKind::Crate,
+ }
+ }
}
}
}
@@ -270,6 +280,29 @@ fn convert_path(
Some(mod_path)
}
+pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
+ // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
+ // we don't want to pretend that the `macro_rules!` definition is in the `macro`
+ // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
+ // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
+ // definitions actually produced by `macro` and `macro` definitions produced by
+ // `macro_rules!`, but at least such configurations are not stable yet.
+ ctxt = ctxt.normalize_to_macro_rules(db);
+ let mut iter = marks_rev(ctxt, db).peekable();
+ let mut result_mark = None;
+ // Find the last opaque mark from the end if it exists.
+ while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
+ result_mark = Some(mark);
+ iter.next();
+ }
+ // Then find the last semi-transparent mark from the end if it exists.
+ while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
+ result_mark = Some(mark);
+ }
+
+ result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
+}
+
pub use crate::name as __name;
#[macro_export]
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
index a876f48bd..a321f94cd 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -470,6 +470,7 @@ pub mod known {
pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
+ pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");
#[macro_export]
macro_rules! name {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
index 41675c630..de5777968 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -1,6 +1,6 @@
//! Proc Macro Expander stub
-use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
+use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@@ -33,11 +33,15 @@ impl ProcMacroExpander {
calling_crate: CrateId,
tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>,
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id {
- ProcMacroId(DUMMY_ID) => {
- ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
- }
+ ProcMacroId(DUMMY_ID) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+ ExpandError::UnresolvedProcMacro(def_crate),
+ ),
ProcMacroId(id) => {
let proc_macros = db.proc_macros();
let proc_macros = match proc_macros.get(&def_crate) {
@@ -45,7 +49,7 @@ impl ProcMacroExpander {
Some(Err(_)) | None => {
never!("Non-dummy expander even though there are no proc macros");
return ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@@ -59,7 +63,7 @@ impl ProcMacroExpander {
id
);
return ExpandResult::new(
- tt::Subtree::empty(),
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@@ -68,7 +72,8 @@ impl ProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
- match proc_macro.expander.expand(tt, attr_arg, env) {
+ match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
+ {
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
// Don't discard the item in case something unexpected happened while expanding attributes
@@ -78,9 +83,10 @@ impl ProcMacroExpander {
ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
}
ProcMacroExpansionError::System(text)
- | ProcMacroExpansionError::Panic(text) => {
- ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
- }
+ | ProcMacroExpansionError::Panic(text) => ExpandResult::new(
+ tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+ ExpandError::ProcMacroPanic(Box::new(text.into_boxed_str())),
+ ),
},
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
index ab3809abc..acbde26c8 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
@@ -1,5 +1,7 @@
//! A simplified version of quote-crate like quasi quote macro
+use base_db::span::SpanData;
+
// A helper macro quote macro
// FIXME:
// 1. Not all puncts are handled
@@ -8,109 +10,109 @@
#[doc(hidden)]
#[macro_export]
macro_rules! __quote {
- () => {
+ ($span:ident) => {
Vec::<crate::tt::TokenTree>::new()
};
- ( @SUBTREE $delim:ident $($tt:tt)* ) => {
+ ( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
{
- let children = $crate::__quote!($($tt)*);
+ let children = $crate::__quote!($span $($tt)*);
crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
- open: crate::tt::TokenId::unspecified(),
- close: crate::tt::TokenId::unspecified(),
+ open: $span,
+ close: $span,
},
token_trees: $crate::quote::IntoTt::to_tokens(children),
}
}
};
- ( @PUNCT $first:literal ) => {
+ ( @PUNCT($span:ident) $first:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Alone,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
]
}
};
- ( @PUNCT $first:literal, $sec:literal ) => {
+ ( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Joint,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into(),
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $sec,
spacing: crate::tt::Spacing::Alone,
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
]
}
};
// hash variable
- ( # $first:ident $($tail:tt)* ) => {
+ ($span:ident # $first:ident $($tail:tt)* ) => {
{
- let token = $crate::quote::ToTokenTree::to_token($first);
+ let token = $crate::quote::ToTokenTree::to_token($first, $span);
let mut tokens = vec![token.into()];
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
- ( ## $first:ident $($tail:tt)* ) => {
+ ($span:ident ## $first:ident $($tail:tt)* ) => {
{
- let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<crate::tt::TokenTree>>();
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
// Brace
- ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
+ ($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) };
// Bracket
- ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
+ ($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) };
// Parenthesis
- ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
+ ($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };
// Literal
- ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
+ ($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] };
// Ident
- ( $tt:ident ) => {
+ ($span:ident $tt:ident ) => {
vec![ {
crate::tt::Leaf::Ident(crate::tt::Ident {
text: stringify!($tt).into(),
- span: crate::tt::TokenId::unspecified(),
+ span: $span,
}).into()
}]
};
// Puncts
// FIXME: Not all puncts are handled
- ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
- ( & ) => {$crate::__quote!(@PUNCT '&')};
- ( , ) => {$crate::__quote!(@PUNCT ',')};
- ( : ) => {$crate::__quote!(@PUNCT ':')};
- ( ; ) => {$crate::__quote!(@PUNCT ';')};
- ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
- ( . ) => {$crate::__quote!(@PUNCT '.')};
- ( < ) => {$crate::__quote!(@PUNCT '<')};
- ( > ) => {$crate::__quote!(@PUNCT '>')};
- ( ! ) => {$crate::__quote!(@PUNCT '!')};
-
- ( $first:tt $($tail:tt)+ ) => {
+ ($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')};
+ ($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')};
+ ($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')};
+ ($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')};
+ ($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')};
+ ($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')};
+ ($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')};
+ ($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')};
+ ($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')};
+ ($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')};
+
+ ($span:ident $first:tt $($tail:tt)+ ) => {
{
- let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
- let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+ let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first ));
+ let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
@@ -122,19 +124,22 @@ macro_rules! __quote {
/// It probably should implement in proc-macro
#[macro_export]
macro_rules! quote {
- ( $($tt:tt)* ) => {
- $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
+ ($span:ident=> $($tt:tt)* ) => {
+ $crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span)
}
}
pub(crate) trait IntoTt {
- fn to_subtree(self) -> crate::tt::Subtree;
+ fn to_subtree(self, span: SpanData) -> crate::tt::Subtree;
fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
}
impl IntoTt for Vec<crate::tt::TokenTree> {
- fn to_subtree(self) -> crate::tt::Subtree {
- crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self }
+ fn to_subtree(self, span: SpanData) -> crate::tt::Subtree {
+ crate::tt::Subtree {
+ delimiter: crate::tt::Delimiter::invisible_spanned(span),
+ token_trees: self,
+ }
}
fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
@@ -143,7 +148,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> {
}
impl IntoTt for crate::tt::Subtree {
- fn to_subtree(self) -> crate::tt::Subtree {
+ fn to_subtree(self, _: SpanData) -> crate::tt::Subtree {
self
}
@@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree {
}
pub(crate) trait ToTokenTree {
- fn to_token(self) -> crate::tt::TokenTree;
+ fn to_token(self, span: SpanData) -> crate::tt::TokenTree;
}
impl ToTokenTree for crate::tt::TokenTree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self
}
}
impl ToTokenTree for &crate::tt::TokenTree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.clone()
}
}
impl ToTokenTree for crate::tt::Subtree {
- fn to_token(self) -> crate::tt::TokenTree {
+ fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.into()
}
}
macro_rules! impl_to_to_tokentrees {
- ($($ty:ty => $this:ident $im:block);*) => {
+ ($($span:ident: $ty:ty => $this:ident $im:block);*) => {
$(
impl ToTokenTree for $ty {
- fn to_token($this) -> crate::tt::TokenTree {
+ fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.into();
leaf.into()
}
}
impl ToTokenTree for &$ty {
- fn to_token($this) -> crate::tt::TokenTree {
+ fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.clone().into();
leaf.into()
}
@@ -195,60 +200,76 @@ macro_rules! impl_to_to_tokentrees {
}
impl_to_to_tokentrees! {
- u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
- crate::tt::Leaf => self { self };
- crate::tt::Literal => self { self };
- crate::tt::Ident => self { self };
- crate::tt::Punct => self { self };
- &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
- String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
+ span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
+ span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} };
+ _span: crate::tt::Leaf => self { self };
+ _span: crate::tt::Literal => self { self };
+ _span: crate::tt::Ident => self { self };
+ _span: crate::tt::Punct => self { self };
+ span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
+ span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}
}
#[cfg(test)]
mod tests {
+ use crate::tt;
+ use base_db::{
+ span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+ };
+ use expect_test::expect;
+ use syntax::{TextRange, TextSize};
+
+ const DUMMY: tt::SpanData = tt::SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
+ ctx: SyntaxContextId::ROOT,
+ };
+
#[test]
fn test_quote_delimiters() {
- assert_eq!(quote!({}).to_string(), "{}");
- assert_eq!(quote!(()).to_string(), "()");
- assert_eq!(quote!([]).to_string(), "[]");
+ assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
+ assert_eq!(quote!(DUMMY =>()).to_string(), "()");
+ assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
}
#[test]
fn test_quote_idents() {
- assert_eq!(quote!(32).to_string(), "32");
- assert_eq!(quote!(struct).to_string(), "struct");
+ assert_eq!(quote!(DUMMY =>32).to_string(), "32");
+ assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
}
#[test]
fn test_quote_hash_simple_literal() {
let a = 20;
- assert_eq!(quote!(#a).to_string(), "20");
+ assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
let s: String = "hello".into();
- assert_eq!(quote!(#s).to_string(), "\"hello\"");
+ assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
}
fn mk_ident(name: &str) -> crate::tt::Ident {
- crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
+ crate::tt::Ident { text: name.into(), span: DUMMY }
}
#[test]
fn test_quote_hash_token_tree() {
let a = mk_ident("hello");
- let quoted = quote!(#a);
+ let quoted = quote!(DUMMY =>#a);
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:?}");
- assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295");
+ expect![[r#"
+ SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }
+ IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
}
#[test]
fn test_quote_simple_derive_copy() {
let name = mk_ident("Foo");
- let quoted = quote! {
+ let quoted = quote! {DUMMY =>
impl Clone for #name {
fn clone(&self) -> Self {
Self {}
@@ -268,18 +289,19 @@ mod tests {
// }
let struct_name = mk_ident("Foo");
let fields = [mk_ident("name"), mk_ident("id")];
- let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
+ let fields =
+ fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees);
let list = crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::Brace,
- open: crate::tt::TokenId::unspecified(),
- close: crate::tt::TokenId::unspecified(),
+ open: DUMMY,
+ close: DUMMY,
},
token_trees: fields.collect(),
};
- let quoted = quote! {
+ let quoted = quote! {DUMMY =>
impl Clone for #struct_name {
fn clone(&self) -> Self {
Self #list
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span.rs
new file mode 100644
index 000000000..fe476a40f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/span.rs
@@ -0,0 +1,124 @@
+//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
+//! as associating spans with text ranges in a particular file.
+use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+ FileId,
+};
+use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
+use triomphe::Arc;
+
+use crate::db::ExpandDatabase;
+
+pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;
+
+/// Spanmap for a macro file or a real file
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum SpanMap {
+ /// Spanmap for a macro file
+ ExpansionSpanMap(Arc<ExpansionSpanMap>),
+ /// Spanmap for a real file
+ RealSpanMap(Arc<RealSpanMap>),
+}
+
+#[derive(Copy, Clone)]
+pub enum SpanMapRef<'a> {
+ /// Spanmap for a macro file
+ ExpansionSpanMap(&'a ExpansionSpanMap),
+ /// Spanmap for a real file
+ RealSpanMap(&'a RealSpanMap),
+}
+
+impl mbe::SpanMapper<SpanData> for SpanMap {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+impl mbe::SpanMapper<SpanData> for RealSpanMap {
+ fn span_for(&self, range: TextRange) -> SpanData {
+ self.span_for_range(range)
+ }
+}
+
+impl SpanMap {
+ pub fn span_for_range(&self, range: TextRange) -> SpanData {
+ match self {
+ Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
+ Self::RealSpanMap(span_map) => span_map.span_for_range(range),
+ }
+ }
+
+ pub fn as_ref(&self) -> SpanMapRef<'_> {
+ match self {
+ Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
+ Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
+ }
+ }
+}
+
+impl SpanMapRef<'_> {
+ pub fn span_for_range(self, range: TextRange) -> SpanData {
+ match self {
+ Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
+ Self::RealSpanMap(span_map) => span_map.span_for_range(range),
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Hash, Debug)]
+pub struct RealSpanMap {
+ file_id: FileId,
+ /// Invariant: Sorted vec over TextSize
+ // FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
+ pairs: Box<[(TextSize, ErasedFileAstId)]>,
+ end: TextSize,
+}
+
+impl RealSpanMap {
+ /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
+ pub fn absolute(file_id: FileId) -> Self {
+ RealSpanMap {
+ file_id,
+ pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]),
+ end: TextSize::new(!0),
+ }
+ }
+
+ pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
+ let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
+ let ast_id_map = db.ast_id_map(file_id.into());
+ let tree = db.parse(file_id).tree();
+ pairs
+ .extend(tree.items().map(|item| {
+ (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())
+ }));
+ RealSpanMap {
+ file_id,
+ pairs: pairs.into_boxed_slice(),
+ end: tree.syntax().text_range().end(),
+ }
+ }
+
+ pub fn span_for_range(&self, range: TextRange) -> SpanData {
+ assert!(
+ range.end() <= self.end,
+ "range {range:?} goes beyond the end of the file {:?}",
+ self.end
+ );
+ let start = range.start();
+ let idx = self
+ .pairs
+ .binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less))
+ .unwrap_err();
+ let (offset, ast_id) = self.pairs[idx - 1];
+ SpanData {
+ range: range - offset,
+ anchor: SpanAnchor { file_id: self.file_id, ast_id },
+ ctx: SyntaxContextId::ROOT,
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index b95ae05cc..bbcb76a43 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -13,27 +13,27 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.5"
+itertools.workspace = true
arrayvec = "0.7.2"
-bitflags = "2.1.0"
+bitflags.workspace = true
smallvec.workspace = true
ena = "0.14.0"
-either = "1.7.0"
+either.workspace = true
oorandom = "11.1.3"
-tracing = "0.1.35"
+tracing.workspace = true
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
-chalk-solve = { version = "0.92.0", default-features = false }
-chalk-ir = "0.92.0"
-chalk-recursive = { version = "0.92.0", default-features = false }
-chalk-derive = "0.92.0"
+chalk-solve = { version = "0.95.0", default-features = false }
+chalk-ir = "0.95.0"
+chalk-recursive = { version = "0.95.0", default-features = false }
+chalk-derive = "0.95.0"
la-arena.workspace = true
once_cell = "1.17.0"
triomphe.workspace = true
nohash-hasher.workspace = true
typed-arena = "2.0.1"
-rustc_index.workspace = true
+rustc-dependencies.workspace = true
# local deps
stdx.workspace = true
@@ -47,12 +47,13 @@ limit.workspace = true
[dev-dependencies]
expect-test = "1.4.0"
-tracing = "0.1.35"
-tracing-subscriber = { version = "0.3.16", default-features = false, features = [
- "registry",
-] }
-tracing-tree = "0.2.1"
+tracing.workspace = true
+tracing-subscriber.workspace = true
+tracing-tree.workspace = true
project-model = { path = "../project-model" }
# local deps
test-utils.workspace = true
+
+[features]
+in-rust-tree = ["rustc-dependencies/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
index c0b243ea2..c9ab35685 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -28,6 +28,7 @@ pub trait TyExt {
fn is_unknown(&self) -> bool;
fn contains_unknown(&self) -> bool;
fn is_ty_var(&self) -> bool;
+ fn is_union(&self) -> bool;
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
fn as_builtin(&self) -> Option<BuiltinType>;
@@ -96,6 +97,10 @@ impl TyExt for Ty {
matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
}
+ fn is_union(&self) -> bool {
+ matches!(self.adt_id(Interner), Some(AdtId(hir_def::AdtId::UnionId(_))))
+ }
+
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)> {
match self.kind(Interner) {
TyKind::Adt(AdtId(adt), parameters) => Some((*adt, parameters)),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index 0348680e5..9792d945e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -1,9 +1,10 @@
//! Constant evaluation details
-use base_db::CrateId;
+use base_db::{salsa::Cycle, CrateId};
use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
use hir_def::{
- hir::Expr,
+ body::Body,
+ hir::{Expr, ExprId},
path::Path,
resolver::{Resolver, ValueNs},
type_ref::LiteralConstRef,
@@ -136,7 +137,7 @@ pub fn intern_const_ref(
ty: Ty,
krate: CrateId,
) -> Const {
- let layout = db.layout_of_ty(ty.clone(), Arc::new(TraitEnvironment::empty(krate)));
+ let layout = db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate));
let bytes = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
@@ -184,7 +185,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &GeneralConstId,
_: &Substitution,
_: &Option<Arc<TraitEnvironment>>,
@@ -194,7 +195,7 @@ pub(crate) fn const_eval_recover(
pub(crate) fn const_eval_static_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &StaticId,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@@ -202,7 +203,7 @@ pub(crate) fn const_eval_static_recover(
pub(crate) fn const_eval_discriminant_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &EnumVariantId,
) -> Result<i128, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
@@ -280,7 +281,7 @@ pub(crate) fn const_eval_discriminant_variant(
// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
pub(crate) fn eval_to_const(
- expr: Idx<Expr>,
+ expr: ExprId,
mode: ParamLoweringMode,
ctx: &mut InferenceContext<'_>,
args: impl FnOnce() -> Generics,
@@ -288,13 +289,24 @@ pub(crate) fn eval_to_const(
) -> Const {
let db = ctx.db;
let infer = ctx.clone().resolve_all();
+ fn has_closure(body: &Body, expr: ExprId) -> bool {
+ if matches!(body[expr], Expr::Closure { .. }) {
+ return true;
+ }
+ let mut r = false;
+ body[expr].walk_child_exprs(|idx| r |= has_closure(body, idx));
+ r
+ }
+ if has_closure(&ctx.body, expr) {
+ // Type checking clousres need an isolated body (See the above FIXME). Bail out early to prevent panic.
+ return unknown_const(infer[expr].clone());
+ }
if let Expr::Path(p) = &ctx.body.exprs[expr] {
let resolver = &ctx.resolver;
if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn, infer[expr].clone()) {
return c;
}
}
- let infer = ctx.clone().resolve_all();
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 {
return result;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 7ad3659a4..b395e7f4a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -1159,6 +1159,20 @@ fn pattern_matching_slice() {
"#,
33213,
);
+ check_number(
+ r#"
+ //- minicore: slice, index, coerce_unsized, copy
+ const fn f(mut slice: &[u32]) -> usize {
+ slice = match slice {
+ [0, rest @ ..] | rest => rest,
+ };
+ slice.len()
+ }
+ const GOAL: usize = f(&[]) + f(&[10]) + f(&[0, 100])
+ + f(&[1000, 1000, 1000]) + f(&[0, 57, 34, 46, 10000, 10000]);
+ "#,
+ 10,
+ );
}
#[test]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 9c96b5ab8..410bcbf03 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -20,8 +20,8 @@ use crate::{
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
mir::{BorrowckResult, MirBody, MirLowerError},
Binders, CallableDefId, ClosureId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult,
- Interner, PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty,
- TyDefId, ValueTyDefId,
+ Interner, PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution,
+ TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
};
use hir_expand::name::Name;
@@ -47,7 +47,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: DefWithBodyId,
subst: Substitution,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
@@ -55,7 +55,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: ClosureId,
subst: Substitution,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::borrowck_query)]
@@ -81,7 +81,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: GeneralConstId,
subst: Substitution,
- trait_env: Option<Arc<crate::TraitEnvironment>>,
+ trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
@@ -104,16 +104,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: AdtId,
subst: Substitution,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
- fn layout_of_ty(
- &self,
- ty: Ty,
- env: Arc<crate::TraitEnvironment>,
- ) -> Result<Arc<Layout>, LayoutError>;
+ fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
@@ -121,7 +117,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
fn lookup_impl_method(
&self,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
func: FunctionId,
fn_subst: Substitution,
) -> (FunctionId, Substitution);
@@ -149,10 +145,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
- fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<crate::TraitEnvironment>;
+ fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
#[salsa::invoke(crate::lower::trait_environment_query)]
- fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>;
+ fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
#[salsa::invoke(crate::lower::generic_defaults_query)]
#[salsa::cycle(crate::lower::generic_defaults_recover)]
@@ -249,7 +245,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
fn normalize_projection(
&self,
projection: crate::ProjectionTy,
- env: Arc<crate::TraitEnvironment>,
+ env: Arc<TraitEnvironment>,
) -> Ty;
#[salsa::invoke(trait_solve_wait)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
index ef43ed5c4..c1b361900 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -11,9 +11,3 @@ pub use crate::diagnostics::{
},
unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
};
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct IncoherentImpl {
- pub file_id: hir_expand::HirFileId,
- pub impl_: syntax::AstPtr<syntax::ast::Impl>,
-}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index 36d69edf9..51a044d8e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -9,6 +9,7 @@
//! - constants (e.g. `const FOO: u8 = 10;`)
//! - static items (e.g. `static FOO: u8 = 10;`)
//! - match arm bindings (e.g. `foo @ Some(_)`)
+//! - modules (e.g. `mod foo { ... }` or `mod foo;`)
mod case_conv;
@@ -18,12 +19,12 @@ use hir_def::{
data::adt::VariantData,
hir::{Pat, PatId},
src::HasSource,
- AdtId, AttrDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, ItemContainerId,
- Lookup, ModuleDefId, StaticId, StructId,
+ AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId,
+ StaticId, StructId,
};
use hir_expand::{
name::{AsName, Name},
- HirFileId,
+ HirFileId, MacroFileIdExt,
};
use stdx::{always, never};
use syntax::{
@@ -83,6 +84,7 @@ pub enum IdentType {
Structure,
Variable,
Variant,
+ Module,
}
impl fmt::Display for IdentType {
@@ -97,6 +99,7 @@ impl fmt::Display for IdentType {
IdentType::Structure => "Structure",
IdentType::Variable => "Variable",
IdentType::Variant => "Variant",
+ IdentType::Module => "Module",
};
repr.fmt(f)
@@ -132,6 +135,7 @@ impl<'a> DeclValidator<'a> {
pub(super) fn validate_item(&mut self, item: ModuleDefId) {
match item {
+ ModuleDefId::ModuleId(module_id) => self.validate_module(module_id),
ModuleDefId::FunctionId(func) => self.validate_func(func),
ModuleDefId::AdtId(adt) => self.validate_adt(adt),
ModuleDefId::ConstId(const_id) => self.validate_const(const_id),
@@ -192,7 +196,7 @@ impl<'a> DeclValidator<'a> {
AttrDefId::GenericParamId(_) => None,
}
.map_or(false, |file_id| {
- file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast())
+ matches!(file_id.macro_file(), Some(file_id) if file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast()))
})
};
@@ -230,6 +234,55 @@ impl<'a> DeclValidator<'a> {
|| parent()
}
+ fn validate_module(&mut self, module_id: ModuleId) {
+ // Check whether non-snake case identifiers are allowed for this module.
+ if self.allowed(module_id.into(), allow::NON_SNAKE_CASE, false) {
+ return;
+ }
+
+ // Check the module name.
+ let Some(module_name) = module_id.name(self.db.upcast()) else { return };
+ let module_name_replacement =
+ module_name.as_str().and_then(to_lower_snake_case).map(|new_name| Replacement {
+ current_name: module_name,
+ suggested_text: new_name,
+ expected_case: CaseType::LowerSnakeCase,
+ });
+
+ if let Some(module_name_replacement) = module_name_replacement {
+ let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id];
+ let module_src = module_data.declaration_source(self.db.upcast());
+
+ if let Some(module_src) = module_src {
+ let ast_ptr = match module_src.value.name() {
+ Some(name) => name,
+ None => {
+ never!(
+ "Replacement ({:?}) was generated for a module without a name: {:?}",
+ module_name_replacement,
+ module_src
+ );
+ return;
+ }
+ };
+
+ let diagnostic = IncorrectCase {
+ file: module_src.file_id,
+ ident_type: IdentType::Module,
+ ident: AstPtr::new(&ast_ptr),
+ expected_case: module_name_replacement.expected_case,
+ ident_text: module_name_replacement
+ .current_name
+ .display(self.db.upcast())
+ .to_string(),
+ suggested_text: module_name_replacement.suggested_text,
+ };
+
+ self.sink.push(diagnostic);
+ }
+ }
+ }
+
fn validate_func(&mut self, func: FunctionId) {
let data = self.db.function_data(func);
if matches!(func.lookup(self.db.upcast()).container, ItemContainerId::ExternBlockId(_)) {
@@ -237,8 +290,6 @@ impl<'a> DeclValidator<'a> {
return;
}
- self.validate_body_inner_items(func.into());
-
// Check whether non-snake case identifiers are allowed for this function.
if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
return;
@@ -336,48 +387,44 @@ impl<'a> DeclValidator<'a> {
for (id, replacement) in pats_replacements {
if let Ok(source_ptr) = source_map.pat_syntax(id) {
- if let Some(expr) = source_ptr.value.as_ref().left() {
+ if let Some(ptr) = source_ptr.value.clone().cast::<ast::IdentPat>() {
let root = source_ptr.file_syntax(self.db.upcast());
- if let ast::Pat::IdentPat(ident_pat) = expr.to_node(&root) {
- let parent = match ident_pat.syntax().parent() {
- Some(parent) => parent,
- None => continue,
- };
- let name_ast = match ident_pat.name() {
- Some(name_ast) => name_ast,
- None => continue,
- };
+ let ident_pat = ptr.to_node(&root);
+ let parent = match ident_pat.syntax().parent() {
+ Some(parent) => parent,
+ None => continue,
+ };
+ let name_ast = match ident_pat.name() {
+ Some(name_ast) => name_ast,
+ None => continue,
+ };
+
+ let is_param = ast::Param::can_cast(parent.kind());
+
+ // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
+ // because e.g. match arms are patterns as well.
+ // In other words, we check that it's a named variable binding.
+ let is_binding = ast::LetStmt::can_cast(parent.kind())
+ || (ast::MatchArm::can_cast(parent.kind())
+ && ident_pat.at_token().is_some());
+ if !(is_param || is_binding) {
+ // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
+ continue;
+ }
- let is_param = ast::Param::can_cast(parent.kind());
-
- // We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
- // because e.g. match arms are patterns as well.
- // In other words, we check that it's a named variable binding.
- let is_binding = ast::LetStmt::can_cast(parent.kind())
- || (ast::MatchArm::can_cast(parent.kind())
- && ident_pat.at_token().is_some());
- if !(is_param || is_binding) {
- // This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
- continue;
- }
+ let ident_type =
+ if is_param { IdentType::Parameter } else { IdentType::Variable };
- let ident_type =
- if is_param { IdentType::Parameter } else { IdentType::Variable };
-
- let diagnostic = IncorrectCase {
- file: source_ptr.file_id,
- ident_type,
- ident: AstPtr::new(&name_ast),
- expected_case: replacement.expected_case,
- ident_text: replacement
- .current_name
- .display(self.db.upcast())
- .to_string(),
- suggested_text: replacement.suggested_text,
- };
+ let diagnostic = IncorrectCase {
+ file: source_ptr.file_id,
+ ident_type,
+ ident: AstPtr::new(&name_ast),
+ expected_case: replacement.expected_case,
+ ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
+ suggested_text: replacement.suggested_text,
+ };
- self.sink.push(diagnostic);
- }
+ self.sink.push(diagnostic);
}
}
}
@@ -519,11 +566,6 @@ impl<'a> DeclValidator<'a> {
fn validate_enum(&mut self, enum_id: EnumId) {
let data = self.db.enum_data(enum_id);
- for (local_id, _) in data.variants.iter() {
- let variant_id = EnumVariantId { parent: enum_id, local_id };
- self.validate_body_inner_items(variant_id.into());
- }
-
// Check whether non-camel case names are allowed for this enum.
if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
@@ -648,8 +690,6 @@ impl<'a> DeclValidator<'a> {
fn validate_const(&mut self, const_id: ConstId) {
let data = self.db.const_data(const_id);
- self.validate_body_inner_items(const_id.into());
-
if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
@@ -698,8 +738,6 @@ impl<'a> DeclValidator<'a> {
return;
}
- self.validate_body_inner_items(static_id.into());
-
if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
@@ -737,17 +775,4 @@ impl<'a> DeclValidator<'a> {
self.sink.push(diagnostic);
}
-
- // FIXME: We don't currently validate names within `DefWithBodyId::InTypeConstId`.
- /// Recursively validates inner scope items, such as static variables and constants.
- fn validate_body_inner_items(&mut self, body_id: DefWithBodyId) {
- let body = self.db.body(body_id);
- for (_, block_def_map) in body.blocks(self.db.upcast()) {
- for (_, module) in block_def_map.modules() {
- for def_id in module.scope.declarations() {
- self.validate_item(def_id);
- }
- }
- }
- }
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
index 2c1368962..cbe1af157 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
@@ -11,50 +11,7 @@ pub(crate) fn to_camel_case(ident: &str) -> Option<String> {
return None;
}
- // Taken from rustc.
- let ret = ident
- .trim_matches('_')
- .split('_')
- .filter(|component| !component.is_empty())
- .map(|component| {
- let mut camel_cased_component = String::with_capacity(component.len());
-
- let mut new_word = true;
- let mut prev_is_lower_case = true;
-
- for c in component.chars() {
- // Preserve the case if an uppercase letter follows a lowercase letter, so that
- // `camelCase` is converted to `CamelCase`.
- if prev_is_lower_case && c.is_uppercase() {
- new_word = true;
- }
-
- if new_word {
- camel_cased_component.extend(c.to_uppercase());
- } else {
- camel_cased_component.extend(c.to_lowercase());
- }
-
- prev_is_lower_case = c.is_lowercase();
- new_word = false;
- }
-
- camel_cased_component
- })
- .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
- // separate two components with an underscore if their boundary cannot
- // be distinguished using an uppercase/lowercase case distinction
- let join = prev
- .and_then(|prev| {
- let f = next.chars().next()?;
- let l = prev.chars().last()?;
- Some(!char_has_case(l) && !char_has_case(f))
- })
- .unwrap_or(false);
- (acc + if join { "_" } else { "" } + &next, Some(next))
- })
- .0;
- Some(ret)
+ Some(stdx::to_camel_case(ident))
}
/// Converts an identifier to a lower_snake_case form.
@@ -97,7 +54,9 @@ fn is_camel_case(name: &str) -> bool {
&& !name.chars().any(|snd| {
let ret = match fst {
None => false,
- Some(fst) => char_has_case(fst) && snd == '_' || char_has_case(snd) && fst == '_',
+ Some(fst) => {
+ stdx::char_has_case(fst) && snd == '_' || stdx::char_has_case(snd) && fst == '_'
+ }
};
fst = Some(snd);
@@ -135,11 +94,6 @@ fn is_snake_case<F: Fn(char) -> bool>(ident: &str, wrong_case: F) -> bool {
})
}
-// Taken from rustc.
-fn char_has_case(c: char) -> bool {
- c.is_lowercase() || c.is_uppercase()
-}
-
#[cfg(test)]
mod tests {
use super::*;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
index f8cdeaa5e..2e04bbfee 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
@@ -147,7 +147,7 @@ impl<'a> PatCtxt<'a> {
}
hir_def::hir::Pat::Bind { id, subpat, .. } => {
- let bm = self.infer.binding_modes[id];
+ let bm = self.infer.binding_modes[pat];
ty = &self.infer[id];
let name = &self.body.bindings[id].name;
match (bm, ty.kind(Interner)) {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index f6d6b00d7..d81926f7c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -23,7 +23,7 @@ use hir_def::{
EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
TraitId,
};
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::name::Name;
use intern::{Internable, Interned};
use itertools::Itertools;
use la_arena::ArenaMap;
@@ -448,9 +448,8 @@ fn render_const_scalar(
) -> Result<(), HirDisplayError> {
// FIXME: We need to get krate from the final callers of the hir display
// infrastructure and have it here as a field on `f`.
- let trait_env = Arc::new(TraitEnvironment::empty(
- *f.db.crate_graph().crates_in_topological_order().last().unwrap(),
- ));
+ let trait_env =
+ TraitEnvironment::empty(*f.db.crate_graph().crates_in_topological_order().last().unwrap());
match ty.kind(Interner) {
TyKind::Scalar(s) => match s {
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
@@ -945,6 +944,7 @@ impl HirDisplay for Ty {
ItemInNs::Types((*def_id).into()),
module_id,
false,
+ true,
) {
write!(f, "{}", path.display(f.db.upcast()))?;
} else {
@@ -1731,13 +1731,13 @@ impl HirDisplay for TypeRef {
f.write_joined(bounds, " + ")?;
}
TypeRef::Macro(macro_call) => {
- let macro_call = macro_call.to_node(f.db.upcast());
- let ctx = hir_def::lower::LowerCtx::with_hygiene(
+ let ctx = hir_def::lower::LowerCtx::with_span_map(
f.db.upcast(),
- &Hygiene::new_unhygienic(),
+ f.db.span_map(macro_call.file_id),
);
+ let macro_call = macro_call.to_node(f.db.upcast());
match macro_call.path() {
- Some(path) => match Path::from_src(path, &ctx) {
+ Some(path) => match Path::from_src(&ctx, path) {
Some(path) => path.hir_fmt(f)?,
None => write!(f, "{{macro}}")?,
},
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 78d3c667a..6f724e458 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -113,7 +113,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
// FIXME(const-generic-body): We should not get the return type in this way.
ctx.return_ty = c
.lookup(db.upcast())
- .thing
+ .expected_ty
.box_any()
.downcast::<InTypeConstIdMetadata>()
.unwrap()
@@ -420,7 +420,19 @@ pub struct InferenceResult {
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
- pub binding_modes: ArenaMap<BindingId, BindingMode>,
+ /// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
+ ///
+ /// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
+ /// or pattern can have multiple binding modes. For example:
+ /// ```
+ /// fn foo(mut slice: &[u32]) -> usize {
+ /// slice = match slice {
+ /// [0, rest @ ..] | rest => rest,
+ /// };
+ /// }
+ /// ```
+ /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
+ pub binding_modes: ArenaMap<PatId, BindingMode>,
pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
@@ -1140,20 +1152,15 @@ impl<'a> InferenceContext<'a> {
(ty, variant)
}
TypeNs::TypeAliasId(it) => {
- let container = it.lookup(self.db.upcast()).container;
- let parent_subst = match container {
- ItemContainerId::TraitId(id) => {
- let subst = TyBuilder::subst_for_def(self.db, id, None)
- .fill_with_inference_vars(&mut self.table)
- .build();
- Some(subst)
- }
- // Type aliases do not exist in impls.
- _ => None,
+ let resolved_seg = match unresolved {
+ None => path.segments().last().unwrap(),
+ Some(n) => path.segments().get(path.segments().len() - n - 1).unwrap(),
};
- let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst)
- .fill_with_inference_vars(&mut self.table)
- .build();
+ let substs =
+ ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None);
+ let ty = self.db.ty(it.into());
+ let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
+
self.resolve_variant_on_alias(ty, unresolved, mod_path)
}
TypeNs::AdtSelfType(_) => {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
index 13d6b5643..af74df103 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -679,7 +679,7 @@ impl InferenceContext<'_> {
| Pat::Range { .. } => {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
}
- Pat::Bind { id, .. } => match self.result.binding_modes[*id] {
+ Pat::Bind { id, .. } => match self.result.binding_modes[p] {
crate::BindingMode::Move => {
if self.is_ty_copy(self.result.type_of_binding[*id].clone()) {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
@@ -735,6 +735,32 @@ impl InferenceContext<'_> {
self.walk_expr(expr);
}
+ fn restrict_precision_for_unsafe(&mut self) {
+ for capture in &mut self.current_captures {
+ let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
+ if ty.as_raw_ptr().is_some() || ty.is_union() {
+ capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+ capture.place.projections.truncate(0);
+ continue;
+ }
+ for (i, p) in capture.place.projections.iter().enumerate() {
+ ty = p.projected_ty(
+ ty,
+ self.db,
+ |_, _, _| {
+ unreachable!("Closure field only happens in MIR");
+ },
+ self.owner.module(self.db.upcast()).krate(),
+ );
+ if ty.as_raw_ptr().is_some() || ty.is_union() {
+ capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
+ capture.place.projections.truncate(i + 1);
+ break;
+ }
+ }
+ }
+ }
+
fn adjust_for_move_closure(&mut self) {
for capture in &mut self.current_captures {
if let Some(first_deref) =
@@ -838,8 +864,8 @@ impl InferenceContext<'_> {
| Pat::ConstBlock(_)
| Pat::Path(_)
| Pat::Lit(_) => self.consume_place(place, pat.into()),
- Pat::Bind { id, subpat: _ } => {
- let mode = self.result.binding_modes[*id];
+ Pat::Bind { id: _, subpat: _ } => {
+ let mode = self.result.binding_modes[pat];
let capture_kind = match mode {
BindingMode::Move => {
self.consume_place(place, pat.into());
@@ -924,6 +950,7 @@ impl InferenceContext<'_> {
self.result.mutated_bindings_in_closure.insert(item.place.local);
}
}
+ self.restrict_precision_for_unsafe();
// closure_kind should be done before adjust_for_move_closure
let closure_kind = self.closure_kind();
match capture_by {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 0c3c725a7..a5e77a12d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -18,7 +18,6 @@ use hir_def::{
use hir_expand::name::{name, Name};
use stdx::always;
use syntax::ast::RangeOp;
-use triomphe::Arc;
use crate::{
autoderef::{builtin_deref, deref_by_trait, Autoderef},
@@ -40,7 +39,8 @@ use crate::{
traits::FnTrait,
utils::{generics, Generics},
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst,
- Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+ Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
+ TyKind,
};
use super::{
@@ -579,7 +579,7 @@ impl InferenceContext<'_> {
}
ty
}
- Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name),
+ Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name, expected),
Expr::Await { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
@@ -1291,7 +1291,7 @@ impl InferenceContext<'_> {
let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
let prev_env = block_id.map(|block_id| {
let prev_env = self.table.trait_env.clone();
- Arc::make_mut(&mut self.table.trait_env).block = Some(block_id);
+ TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
prev_env
});
@@ -1456,7 +1456,13 @@ impl InferenceContext<'_> {
})
}
- fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
+ fn infer_field_access(
+ &mut self,
+ tgt_expr: ExprId,
+ receiver: ExprId,
+ name: &Name,
+ expected: &Expectation,
+ ) -> Ty {
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
if name.is_missing() {
@@ -1482,28 +1488,42 @@ impl InferenceContext<'_> {
ty
}
None => {
- // no field found,
- let method_with_same_name_exists = {
- self.get_traits_in_scope();
-
- let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
- method_resolution::lookup_method(
- self.db,
- &canonicalized_receiver.value,
- self.table.trait_env.clone(),
- self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
- VisibleFromModule::Filter(self.resolver.module()),
- name,
- )
- .is_some()
- };
+ // no field found, lets attempt to resolve it like a function so that IDE things
+ // work out while people are typing
+ let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+ let resolved = method_resolution::lookup_method(
+ self.db,
+ &canonicalized_receiver.value,
+ self.table.trait_env.clone(),
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
+ VisibleFromModule::Filter(self.resolver.module()),
+ name,
+ );
self.result.diagnostics.push(InferenceDiagnostic::UnresolvedField {
expr: tgt_expr,
- receiver: receiver_ty,
+ receiver: receiver_ty.clone(),
name: name.clone(),
- method_with_same_name_exists,
+ method_with_same_name_exists: resolved.is_some(),
});
- self.err_ty()
+ match resolved {
+ Some((adjust, func, _)) => {
+ let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+ let generics = generics(self.db.upcast(), func.into());
+ let substs = self.substs_for_method_call(generics, None);
+ self.write_expr_adj(receiver, adjustments);
+ self.write_method_resolution(tgt_expr, func, substs.clone());
+
+ self.check_method_call(
+ tgt_expr,
+ &[],
+ self.db.value_ty(func.into()),
+ substs,
+ ty,
+ expected,
+ )
+ }
+ None => self.err_ty(),
+ }
}
}
}
@@ -1517,7 +1537,7 @@ impl InferenceContext<'_> {
generic_args: Option<&GenericArgs>,
expected: &Expectation,
) -> Ty {
- let receiver_ty = self.infer_expr(receiver, &Expectation::none());
+ let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
let resolved = method_resolution::lookup_method(
@@ -1568,23 +1588,32 @@ impl InferenceContext<'_> {
)
}
};
+ self.check_method_call(tgt_expr, args, method_ty, substs, receiver_ty, expected)
+ }
+
+ fn check_method_call(
+ &mut self,
+ tgt_expr: ExprId,
+ args: &[ExprId],
+ method_ty: Binders<Ty>,
+ substs: Substitution,
+ receiver_ty: Ty,
+ expected: &Expectation,
+ ) -> Ty {
let method_ty = method_ty.substitute(Interner, &substs);
self.register_obligations_for_call(&method_ty);
- let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
+ let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) =
match method_ty.callable_sig(self.db) {
- Some(sig) => {
+ Some(sig) => (
if !sig.params().is_empty() {
- (
- sig.params()[0].clone(),
- sig.params()[1..].to_vec(),
- sig.ret().clone(),
- sig.is_varargs,
- )
+ (sig.params()[0].clone(), sig.params()[1..].to_vec())
} else {
- (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
- }
- }
- None => (self.err_ty(), Vec::new(), self.err_ty(), true),
+ (self.err_ty(), Vec::new())
+ },
+ sig.ret().clone(),
+ sig.is_varargs,
+ ),
+ None => ((self.err_ty(), Vec::new()), self.err_ty(), true),
};
self.unify(&formal_receiver_ty, &receiver_ty);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
index 4e28ec060..acdb54028 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -262,7 +262,7 @@ impl InferenceContext<'_> {
fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
let mut expected = self.resolve_ty_shallow(expected);
- if is_non_ref_pat(self.body, pat) {
+ if self.is_non_ref_pat(self.body, pat) {
let mut pat_adjustments = Vec::new();
while let Some((inner, _lifetime, mutability)) = expected.as_reference() {
pat_adjustments.push(expected.clone());
@@ -421,7 +421,7 @@ impl InferenceContext<'_> {
} else {
BindingMode::convert(mode)
};
- self.result.binding_modes.insert(binding, mode);
+ self.result.binding_modes.insert(pat, mode);
let inner_ty = match subpat {
Some(subpat) => self.infer_pat(subpat, &expected, default_bm),
@@ -496,24 +496,28 @@ impl InferenceContext<'_> {
self.infer_expr(expr, &Expectation::has_type(expected.clone()))
}
-}
-fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
- match &body[pat] {
- Pat::Tuple { .. }
- | Pat::TupleStruct { .. }
- | Pat::Record { .. }
- | Pat::Range { .. }
- | Pat::Slice { .. } => true,
- Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
- // FIXME: ConstBlock/Path/Lit might actually evaluate to ref, but inference is unimplemented.
- Pat::Path(..) => true,
- Pat::ConstBlock(..) => true,
- Pat::Lit(expr) => !matches!(
- body[*expr],
- Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
- ),
- Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
+ fn is_non_ref_pat(&mut self, body: &hir_def::body::Body, pat: PatId) -> bool {
+ match &body[pat] {
+ Pat::Tuple { .. }
+ | Pat::TupleStruct { .. }
+ | Pat::Record { .. }
+ | Pat::Range { .. }
+ | Pat::Slice { .. } => true,
+ Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)),
+ Pat::Path(p) => {
+ let v = self.resolve_value_path_inner(p, pat.into());
+ v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_)))
+ }
+ Pat::ConstBlock(..) => false,
+ Pat::Lit(expr) => !matches!(
+ body[*expr],
+ Expr::Literal(Literal::String(..) | Literal::CString(..) | Literal::ByteString(..))
+ ),
+ Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => {
+ false
+ }
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
index c6bbf2f61..49fb78f67 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -40,33 +40,7 @@ impl InferenceContext<'_> {
}
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
- let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
- let last = path.segments().last()?;
-
- // Don't use `self.make_ty()` here as we need `orig_ns`.
- let ctx =
- crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
- let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
- let ty = self.table.insert_type_vars(ty);
- let ty = self.table.normalize_associated_types_in(ty);
-
- let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
- let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
- let ty = self.table.insert_type_vars(ty);
- let ty = self.table.normalize_associated_types_in(ty);
- self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
- } else {
- // FIXME: report error, unresolved first path segment
- let value_or_partial =
- self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
-
- match value_or_partial {
- ResolveValueResult::ValueNs(it, _) => (it, None),
- ResolveValueResult::Partial(def, remaining_index, _) => self
- .resolve_assoc_item(def, path, remaining_index, id)
- .map(|(it, substs)| (it, Some(substs)))?,
- }
- };
+ let (value, self_subst) = self.resolve_value_path_inner(path, id)?;
let value_def = match value {
ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
@@ -144,6 +118,41 @@ impl InferenceContext<'_> {
Some(ValuePathResolution::GenericDef(value_def, generic_def, substs))
}
+ pub(super) fn resolve_value_path_inner(
+ &mut self,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> {
+ let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+ let last = path.segments().last()?;
+
+ // Don't use `self.make_ty()` here as we need `orig_ns`.
+ let ctx =
+ crate::lower::TyLoweringContext::new(self.db, &self.resolver, self.owner.into());
+ let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
+ let ty = self.table.insert_type_vars(ty);
+ let ty = self.table.normalize_associated_types_in(ty);
+
+ let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+ let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
+ let ty = self.table.insert_type_vars(ty);
+ let ty = self.table.normalize_associated_types_in(ty);
+ self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
+ } else {
+ // FIXME: report error, unresolved first path segment
+ let value_or_partial =
+ self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
+
+ match value_or_partial {
+ ResolveValueResult::ValueNs(it, _) => (it, None),
+ ResolveValueResult::Partial(def, remaining_index, _) => self
+ .resolve_assoc_item(def, path, remaining_index, id)
+ .map(|(it, substs)| (it, Some(substs)))?,
+ }
+ };
+ Some((value, self_subst))
+ }
+
fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
let predicates = self.db.generic_predicates(def);
for predicate in predicates.iter() {
@@ -390,6 +399,7 @@ impl InferenceContext<'_> {
}
}
+#[derive(Debug)]
enum ValuePathResolution {
// It's awkward to wrap a single ID in two enums, but we need both and this saves fallible
// conversion between them + `unwrap()`.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index 0a68a9f3b..ac39bdf5b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -43,7 +43,7 @@ where
}
impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
- pub(super) fn apply_solution(
+ pub(crate) fn apply_solution(
&self,
ctx: &mut InferenceTable<'_>,
solution: Canonical<Substitution>,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index 1a6106c02..bfc4f1383 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -1,5 +1,8 @@
//! Compute the binary representation of a type
+use std::fmt;
+
+use base_db::salsa::Cycle;
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{
layout::{
@@ -9,6 +12,10 @@ use hir_def::{
LocalEnumVariantId, LocalFieldId, StructId,
};
use la_arena::{Idx, RawIdx};
+use rustc_dependencies::{
+ abi::AddressSpace,
+ index::{IndexSlice, IndexVec},
+};
use stdx::never;
use triomphe::Arc;
@@ -22,19 +29,13 @@ pub use self::{
target::target_data_layout_query,
};
-macro_rules! user_error {
- ($it: expr) => {
- return Err(LayoutError::UserError(format!($it).into()))
- };
-}
-
mod adt;
mod target;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct RustcEnumVariantIdx(pub LocalEnumVariantId);
-impl rustc_index::vec::Idx for RustcEnumVariantIdx {
+impl rustc_dependencies::index::Idx for RustcEnumVariantIdx {
fn new(idx: usize) -> Self {
RustcEnumVariantIdx(Idx::from_raw(RawIdx::from(idx as u32)))
}
@@ -44,19 +45,63 @@ impl rustc_index::vec::Idx for RustcEnumVariantIdx {
}
}
-pub type Layout = LayoutS<RustcEnumVariantIdx>;
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct RustcFieldIdx(pub LocalFieldId);
+
+impl RustcFieldIdx {
+ pub fn new(idx: usize) -> Self {
+ RustcFieldIdx(Idx::from_raw(RawIdx::from(idx as u32)))
+ }
+}
+
+impl rustc_dependencies::index::Idx for RustcFieldIdx {
+ fn new(idx: usize) -> Self {
+ RustcFieldIdx(Idx::from_raw(RawIdx::from(idx as u32)))
+ }
+
+ fn index(self) -> usize {
+ u32::from(self.0.into_raw()) as usize
+ }
+}
+
+pub type Layout = LayoutS<RustcFieldIdx, RustcEnumVariantIdx>;
pub type TagEncoding = hir_def::layout::TagEncoding<RustcEnumVariantIdx>;
-pub type Variants = hir_def::layout::Variants<RustcEnumVariantIdx>;
+pub type Variants = hir_def::layout::Variants<RustcFieldIdx, RustcEnumVariantIdx>;
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LayoutError {
- UserError(Box<str>),
- SizeOverflow,
- TargetLayoutNotAvailable,
- HasPlaceholder,
+ HasErrorConst,
HasErrorType,
+ HasPlaceholder,
+ InvalidSimdType,
NotImplemented,
+ RecursiveTypeWithoutIndirection,
+ SizeOverflow,
+ TargetLayoutNotAvailable,
Unknown,
+ UserReprTooSmall,
+}
+
+impl std::error::Error for LayoutError {}
+impl fmt::Display for LayoutError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ LayoutError::HasErrorConst => write!(f, "type contains an unevaluatable const"),
+ LayoutError::HasErrorType => write!(f, "type contains an error"),
+ LayoutError::HasPlaceholder => write!(f, "type contains placeholders"),
+ LayoutError::InvalidSimdType => write!(f, "invalid simd type definition"),
+ LayoutError::NotImplemented => write!(f, "not implemented"),
+ LayoutError::RecursiveTypeWithoutIndirection => {
+ write!(f, "recursive type without indirection")
+ }
+ LayoutError::SizeOverflow => write!(f, "size overflow"),
+ LayoutError::TargetLayoutNotAvailable => write!(f, "target layout not available"),
+ LayoutError::Unknown => write!(f, "unknown"),
+ LayoutError::UserReprTooSmall => {
+ write!(f, "the `#[repr]` hint is too small to hold the discriminants of the enum")
+ }
+ }
+ }
}
struct LayoutCx<'a> {
@@ -66,7 +111,7 @@ struct LayoutCx<'a> {
impl<'a> LayoutCalculator for LayoutCx<'a> {
type TargetDataLayoutRef = &'a TargetDataLayout;
- fn delay_bug(&self, txt: &str) {
+ fn delayed_bug(&self, txt: String) {
never!("{}", txt);
}
@@ -95,9 +140,7 @@ fn layout_of_simd_ty(
let f0_ty = match fields.iter().next() {
Some(it) => it.1.clone().substitute(Interner, subst),
- None => {
- user_error!("simd type with zero fields");
- }
+ None => return Err(LayoutError::InvalidSimdType),
};
// The element type and number of elements of the SIMD vector
@@ -111,7 +154,7 @@ fn layout_of_simd_ty(
// Extract the number of elements from the layout of the array field:
let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields
else {
- user_error!("Array with non array layout");
+ return Err(LayoutError::Unknown);
};
(e_ty.clone(), count, true)
@@ -123,7 +166,7 @@ fn layout_of_simd_ty(
// Compute the ABI of the element type:
let e_ly = db.layout_of_ty(e_ty, env.clone())?;
let Abi::Scalar(e_abi) = e_ly.abi else {
- user_error!("simd type with inner non scalar type");
+ return Err(LayoutError::Unknown);
};
// Compute the size and alignment of the vector:
@@ -145,6 +188,8 @@ fn layout_of_simd_ty(
largest_niche: e_ly.largest_niche,
size,
align,
+ max_repr_align: None,
+ unadjusted_abi_align: align.abi,
}))
}
@@ -230,13 +275,11 @@ pub fn layout_of_ty_query(
.map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
- let fields = fields.iter().collect::<Vec<_>>();
+ let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
TyKind::Array(element, count) => {
- let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(Box::from(
- "unevaluated or mistyped const generic parameter",
- )))? as u64;
+ let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
@@ -255,6 +298,8 @@ pub fn layout_of_ty_query(
largest_niche,
align: element.align,
size,
+ max_repr_align: None,
+ unadjusted_abi_align: element.align.abi,
}
}
TyKind::Slice(element) => {
@@ -266,11 +311,23 @@ pub fn layout_of_ty_query(
largest_niche: None,
align: element.align,
size: Size::ZERO,
+ max_repr_align: None,
+ unadjusted_abi_align: element.align.abi,
}
}
+ TyKind::Str => Layout {
+ variants: Variants::Single { index: struct_variant_idx() },
+ fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 },
+ abi: Abi::Aggregate { sized: false },
+ largest_niche: None,
+ align: dl.i8_align,
+ size: Size::ZERO,
+ max_repr_align: None,
+ unadjusted_abi_align: dl.i8_align.abi,
+ },
// Potentially-wide pointers.
TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => {
- let mut data_ptr = scalar_unit(dl, Primitive::Pointer);
+ let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
if matches!(ty.kind(Interner), TyKind::Ref(..)) {
data_ptr.valid_range_mut().start = 1;
}
@@ -294,7 +351,7 @@ pub fn layout_of_ty_query(
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
}
TyKind::Dyn(..) => {
- let mut vtable = scalar_unit(dl, Primitive::Pointer);
+ let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
vtable.valid_range_mut().start = 1;
vtable
}
@@ -308,32 +365,17 @@ pub fn layout_of_ty_query(
cx.scalar_pair(data_ptr, metadata)
}
TyKind::FnDef(_, _) => layout_of_unit(&cx, dl)?,
- TyKind::Str => Layout {
- variants: Variants::Single { index: struct_variant_idx() },
- fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 },
- abi: Abi::Aggregate { sized: false },
- largest_niche: None,
- align: dl.i8_align,
- size: Size::ZERO,
- },
- TyKind::Never => Layout {
- variants: Variants::Single { index: struct_variant_idx() },
- fields: FieldsShape::Primitive,
- abi: Abi::Uninhabited,
- largest_niche: None,
- align: dl.i8_align,
- size: Size::ZERO,
- },
+ TyKind::Never => cx.layout_of_never_type(),
TyKind::Dyn(_) | TyKind::Foreign(_) => {
let mut unit = layout_of_unit(&cx, dl)?;
match unit.abi {
Abi::Aggregate { ref mut sized } => *sized = false,
- _ => user_error!("bug"),
+ _ => return Err(LayoutError::Unknown),
}
unit
}
TyKind::Function(_) => {
- let mut ptr = scalar_unit(dl, Primitive::Pointer);
+ let mut ptr = scalar_unit(dl, Primitive::Pointer(dl.instruction_address_space));
ptr.valid_range_mut().start = 1;
Layout::scalar(dl, ptr)
}
@@ -363,7 +405,7 @@ pub fn layout_of_ty_query(
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
- let fields = fields.iter().collect::<Vec<_>>();
+ let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
.ok_or(LayoutError::Unknown)?
}
@@ -390,17 +432,17 @@ pub fn layout_of_ty_query(
pub fn layout_of_ty_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &Ty,
_: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
- user_error!("infinite sized recursive type");
+ Err(LayoutError::RecursiveTypeWithoutIndirection)
}
fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> {
- cx.univariant::<RustcEnumVariantIdx, &&Layout>(
+ cx.univariant::<RustcFieldIdx, RustcEnumVariantIdx, &&Layout>(
dl,
- &[],
+ IndexSlice::empty(),
&ReprOptions::default(),
StructKind::AlwaysSized,
)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index 85ef649b8..39788a950 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -2,12 +2,14 @@
use std::{cmp, ops::Bound};
+use base_db::salsa::Cycle;
use hir_def::{
data::adt::VariantData,
layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout},
AdtId, EnumVariantId, LocalEnumVariantId, VariantId,
};
use la_arena::RawIdx;
+use rustc_dependencies::index::IndexVec;
use smallvec::SmallVec;
use triomphe::Arc;
@@ -20,8 +22,8 @@ use crate::{
use super::LayoutCx;
-pub(crate) fn struct_variant_idx() -> RustcEnumVariantIdx {
- RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from(0)))
+pub(crate) const fn struct_variant_idx() -> RustcEnumVariantIdx {
+ RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from_u32(0)))
}
pub fn layout_of_adt_query(
@@ -74,7 +76,7 @@ pub fn layout_of_adt_query(
.iter()
.map(|it| it.iter().map(|it| &**it).collect::<Vec<_>>())
.collect::<SmallVec<[_; 1]>>();
- let variants = variants.iter().map(|it| it.iter().collect()).collect();
+ let variants = variants.iter().map(|it| it.iter().collect()).collect::<IndexVec<_, _>>();
let result = if matches!(def, AdtId::UnionId(..)) {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
} else {
@@ -105,7 +107,7 @@ pub fn layout_of_adt_query(
&& variants
.iter()
.next()
- .and_then(|it| it.last().map(|it| !it.is_unsized()))
+ .and_then(|it| it.iter().last().map(|it| !it.is_unsized()))
.unwrap_or(true),
)
.ok_or(LayoutError::SizeOverflow)?
@@ -119,7 +121,15 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
let attr = attrs.by_key(name).tt_values();
for tree in attr {
if let Some(it) = tree.token_trees.first() {
- if let Ok(it) = it.to_string().parse() {
+ let text = it.to_string().replace('_', "");
+ let (text, base) = match text.as_bytes() {
+ [b'0', b'x', ..] => (&text[2..], 16),
+ [b'0', b'o', ..] => (&text[2..], 8),
+ [b'0', b'b', ..] => (&text[2..], 2),
+ _ => (&*text, 10),
+ };
+
+ if let Ok(it) = u128::from_str_radix(text, base) {
return Bound::Included(it);
}
}
@@ -131,12 +141,12 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
pub fn layout_of_adt_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &AdtId,
_: &Substitution,
_: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
- user_error!("infinite sized recursive type");
+ Err(LayoutError::RecursiveTypeWithoutIndirection)
}
/// Finds the appropriate Integer type and signedness for the given
@@ -160,11 +170,7 @@ fn repr_discr(
let discr = Integer::from_attr(dl, ity);
let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
if discr < fit {
- return Err(LayoutError::UserError(
- "Integer::repr_discr: `#[repr]` hint too small for \
- discriminant range of enum "
- .into(),
- ));
+ return Err(LayoutError::UserReprTooSmall);
}
return Ok((discr, ity.is_signed()));
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index ffdbb9de9..5e3a86c80 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -210,16 +210,13 @@ fn recursive() {
struct BoxLike<T: ?Sized>(*mut T);
struct Goal(BoxLike<Goal>);
}
- check_fail(
- r#"struct Goal(Goal);"#,
- LayoutError::UserError("infinite sized recursive type".into()),
- );
+ check_fail(r#"struct Goal(Goal);"#, LayoutError::RecursiveTypeWithoutIndirection);
check_fail(
r#"
struct Foo<T>(Foo<T>);
struct Goal(Foo<i32>);
"#,
- LayoutError::UserError("infinite sized recursive type".into()),
+ LayoutError::RecursiveTypeWithoutIndirection,
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs
index bbe855a14..939025461 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests/closure.rs
@@ -186,9 +186,9 @@ fn capture_specific_fields() {
fn match_pattern() {
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
- let y: X = X(2, 5, (7, 3));
+ let _y: X = X(2, 5, (7, 3));
move |x: i64| {
- match y {
+ match _y {
_ => x,
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 405bb001b..cf174feed 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -1,6 +1,7 @@
//! The type system. We currently use this to infer types for completion, hover
//! information and various assists.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[allow(unused)]
macro_rules! eprintln {
@@ -72,14 +73,15 @@ pub use infer::{
};
pub use interner::Interner;
pub use lower::{
- associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId,
- TyLoweringContext, ValueTyDefId,
+ associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, ParamLoweringMode,
+ TyDefId, TyLoweringContext, ValueTyDefId,
};
pub use mapping::{
from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id,
to_placeholder_idx,
};
+pub use method_resolution::check_orphan_rules;
pub use traits::TraitEnvironment;
pub use utils::{all_super_traits, is_fn_unsafe_to_call};
@@ -120,7 +122,7 @@ pub type TyKind = chalk_ir::TyKind<Interner>;
pub type TypeFlags = chalk_ir::TypeFlags;
pub type DynTy = chalk_ir::DynTy<Interner>;
pub type FnPointer = chalk_ir::FnPointer<Interner>;
-// pub type FnSubst = chalk_ir::FnSubst<Interner>;
+// pub type FnSubst = chalk_ir::FnSubst<Interner>; // a re-export so we don't lose the tuple constructor
pub use chalk_ir::FnSubst;
pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
pub type AliasTy = chalk_ir::AliasTy<Interner>;
@@ -320,8 +322,7 @@ impl CallableSig {
pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
CallableSig {
// FIXME: what to do about lifetime params? -> return PolyFnSig
- // FIXME: use `Arc::from_iter` when it becomes available
- params_and_return: Arc::from(
+ params_and_return: Arc::from_iter(
fn_ptr
.substitution
.clone()
@@ -330,8 +331,7 @@ impl CallableSig {
.0
.as_slice(Interner)
.iter()
- .map(|arg| arg.assert_ty_ref(Interner).clone())
- .collect::<Vec<_>>(),
+ .map(|arg| arg.assert_ty_ref(Interner).clone()),
),
is_varargs: fn_ptr.sig.variadic,
safety: fn_ptr.sig.safety,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 9a61f1535..97c4a741f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -10,7 +10,7 @@ use std::{
iter,
};
-use base_db::CrateId;
+use base_db::{salsa::Cycle, CrateId};
use chalk_ir::{
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
};
@@ -113,7 +113,9 @@ pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,
resolver: &'a Resolver,
in_binders: DebruijnIndex,
- owner: TypeOwnerId,
+ // FIXME: Should not be an `Option` but `Resolver` currently does not return owners in all cases
+ // where expected
+ owner: Option<TypeOwnerId>,
/// Note: Conceptually, it's thinkable that we could be in a location where
/// some type params should be represented as placeholders, and others
/// should be converted to variables. I think in practice, this isn't
@@ -127,6 +129,14 @@ pub struct TyLoweringContext<'a> {
impl<'a> TyLoweringContext<'a> {
pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver, owner: TypeOwnerId) -> Self {
+ Self::new_maybe_unowned(db, resolver, Some(owner))
+ }
+
+ pub fn new_maybe_unowned(
+ db: &'a dyn HirDatabase,
+ resolver: &'a Resolver,
+ owner: Option<TypeOwnerId>,
+ ) -> Self {
let impl_trait_mode = ImplTraitLoweringState::Disallowed;
let type_param_mode = ParamLoweringMode::Placeholder;
let in_binders = DebruijnIndex::INNERMOST;
@@ -213,10 +223,11 @@ impl<'a> TyLoweringContext<'a> {
}
pub fn lower_const(&self, const_ref: &ConstRef, const_type: Ty) -> Const {
+ let Some(owner) = self.owner else { return unknown_const(const_type) };
const_or_path_to_chalk(
self.db,
self.resolver,
- self.owner,
+ owner,
const_type,
const_ref,
self.type_param_mode,
@@ -407,11 +418,7 @@ impl<'a> TyLoweringContext<'a> {
drop(expander);
let ty = self.lower_ty(&type_ref);
- self.expander
- .borrow_mut()
- .as_mut()
- .unwrap()
- .exit(self.db.upcast(), mark);
+ self.expander.borrow_mut().as_mut().unwrap().exit(mark);
Some(ty)
}
_ => {
@@ -768,7 +775,7 @@ impl<'a> TyLoweringContext<'a> {
}
}
- fn substs_from_path_segment(
+ pub(super) fn substs_from_path_segment(
&self,
segment: PathSegment<'_>,
def: Option<GenericDefId>,
@@ -1097,10 +1104,25 @@ impl<'a> TyLoweringContext<'a> {
binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
);
if let Some(type_ref) = &binding.type_ref {
- let ty = self.lower_ty(type_ref);
- let alias_eq =
- AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
- predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ if let (TypeRef::ImplTrait(bounds), ImplTraitLoweringState::Disallowed) =
+ (type_ref, &self.impl_trait_mode)
+ {
+ for bound in bounds {
+ predicates.extend(
+ self.lower_type_bound(
+ bound,
+ TyKind::Alias(AliasTy::Projection(projection_ty.clone()))
+ .intern(Interner),
+ false,
+ ),
+ );
+ }
+ } else {
+ let ty = self.lower_ty(type_ref);
+ let alias_eq =
+ AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
+ predicates.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ }
}
for bound in binding.bounds.iter() {
predicates.extend(self.lower_type_bound(
@@ -1383,51 +1405,50 @@ pub(crate) fn generic_predicates_for_param_query(
let ctx = TyLoweringContext::new(db, &resolver, def.into())
.with_type_param_mode(ParamLoweringMode::Variable);
let generics = generics(db.upcast(), def);
- let mut predicates: Vec<_> = resolver
- .where_predicates_in_scope()
- // we have to filter out all other predicates *first*, before attempting to lower them
- .filter(|pred| match pred {
- WherePredicate::ForLifetime { target, bound, .. }
- | WherePredicate::TypeBound { target, bound, .. } => {
- match target {
- WherePredicateTypeTarget::TypeRef(type_ref) => {
- if ctx.lower_ty_only_param(type_ref) != Some(param_id) {
- return false;
- }
- }
- &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
- let target_id = TypeOrConstParamId { parent: def, local_id };
- if target_id != param_id {
- return false;
- }
- }
- };
- match &**bound {
- TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
- // Only lower the bound if the trait could possibly define the associated
- // type we're looking for.
+ // we have to filter out all other predicates *first*, before attempting to lower them
+ let predicate = |pred: &&_| match pred {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound, .. } => {
+ let invalid_target = match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => {
+ ctx.lower_ty_only_param(type_ref) != Some(param_id)
+ }
+ &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+ let target_id = TypeOrConstParamId { parent: def, local_id };
+ target_id != param_id
+ }
+ };
+ if invalid_target {
+ return false;
+ }
+
+ match &**bound {
+ TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
+ // Only lower the bound if the trait could possibly define the associated
+ // type we're looking for.
- let assoc_name = match &assoc_name {
- Some(it) => it,
- None => return true,
- };
- let tr = match resolver.resolve_path_in_type_ns_fully(db.upcast(), path) {
- Some(TypeNs::TraitId(tr)) => tr,
- _ => return false,
- };
+ let Some(assoc_name) = &assoc_name else { return true };
+ let Some(TypeNs::TraitId(tr)) =
+ resolver.resolve_path_in_type_ns_fully(db.upcast(), path)
+ else {
+ return false;
+ };
- all_super_traits(db.upcast(), tr).iter().any(|tr| {
- db.trait_data(*tr).items.iter().any(|(name, item)| {
- matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
- })
+ all_super_traits(db.upcast(), tr).iter().any(|tr| {
+ db.trait_data(*tr).items.iter().any(|(name, item)| {
+ matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
})
- }
- TypeBound::Lifetime(_) | TypeBound::Error => false,
+ })
}
+ TypeBound::Lifetime(_) | TypeBound::Error => false,
}
- WherePredicate::Lifetime { .. } => false,
- })
+ }
+ WherePredicate::Lifetime { .. } => false,
+ };
+ let mut predicates: Vec<_> = resolver
+ .where_predicates_in_scope()
+ .filter(predicate)
.flat_map(|pred| {
ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p))
})
@@ -1444,13 +1465,12 @@ pub(crate) fn generic_predicates_for_param_query(
pub(crate) fn generic_predicates_for_param_recover(
_db: &dyn HirDatabase,
- _cycle: &[String],
+ _cycle: &Cycle,
_def: &GenericDefId,
_param_id: &TypeOrConstParamId,
_assoc_name: &Option<Name>,
) -> Arc<[Binders<QuantifiedWhereClause>]> {
- // FIXME: use `Arc::from_iter` when it becomes available
- Arc::from(vec![])
+ Arc::from_iter(None)
}
pub(crate) fn trait_environment_for_body_query(
@@ -1459,7 +1479,7 @@ pub(crate) fn trait_environment_for_body_query(
) -> Arc<TraitEnvironment> {
let Some(def) = def.as_generic_def_id() else {
let krate = def.module(db.upcast()).krate();
- return Arc::new(TraitEnvironment::empty(krate));
+ return TraitEnvironment::empty(krate);
};
db.trait_environment(def)
}
@@ -1519,7 +1539,7 @@ pub(crate) fn trait_environment_query(
let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
- Arc::new(TraitEnvironment { krate, block: None, traits_from_clauses: traits_in_scope, env })
+ TraitEnvironment::new(krate, None, traits_in_scope.into_boxed_slice(), env)
}
/// Resolve the where clause(s) of an item with generics.
@@ -1588,69 +1608,54 @@ pub(crate) fn generic_defaults_query(
let generic_params = generics(db.upcast(), def);
let parent_start_idx = generic_params.len_self();
- let defaults = Arc::from(
- generic_params
- .iter()
- .enumerate()
- .map(|(idx, (id, p))| {
- match p {
- TypeOrConstParamData::TypeParamData(p) => {
- let mut ty = p
- .default
- .as_ref()
- .map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
- // Each default can only refer to previous parameters.
- // Type variable default referring to parameter coming
- // after it is forbidden (FIXME: report diagnostic)
- ty = fallback_bound_vars(ty, idx, parent_start_idx);
- crate::make_binders(db, &generic_params, ty.cast(Interner))
- }
- TypeOrConstParamData::ConstParamData(p) => {
- let mut val = p.default.as_ref().map_or_else(
- || {
- unknown_const_as_generic(
- db.const_param_ty(ConstParamId::from_unchecked(id)),
- )
- },
- |c| {
- let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
- c.cast(Interner)
- },
- );
- // Each default can only refer to previous parameters, see above.
- val = fallback_bound_vars(val, idx, parent_start_idx);
- make_binders(db, &generic_params, val)
- }
- }
- })
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- );
+ let defaults = Arc::from_iter(generic_params.iter().enumerate().map(|(idx, (id, p))| {
+ match p {
+ TypeOrConstParamData::TypeParamData(p) => {
+ let mut ty =
+ p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
+ // Each default can only refer to previous parameters.
+ // Type variable default referring to parameter coming
+ // after it is forbidden (FIXME: report diagnostic)
+ ty = fallback_bound_vars(ty, idx, parent_start_idx);
+ crate::make_binders(db, &generic_params, ty.cast(Interner))
+ }
+ TypeOrConstParamData::ConstParamData(p) => {
+ let mut val = p.default.as_ref().map_or_else(
+ || {
+ unknown_const_as_generic(
+ db.const_param_ty(ConstParamId::from_unchecked(id)),
+ )
+ },
+ |c| {
+ let c = ctx.lower_const(c, ctx.lower_ty(&p.ty));
+ c.cast(Interner)
+ },
+ );
+ // Each default can only refer to previous parameters, see above.
+ val = fallback_bound_vars(val, idx, parent_start_idx);
+ make_binders(db, &generic_params, val)
+ }
+ }
+ }));
defaults
}
pub(crate) fn generic_defaults_recover(
db: &dyn HirDatabase,
- _cycle: &[String],
+ _cycle: &Cycle,
def: &GenericDefId,
) -> Arc<[Binders<crate::GenericArg>]> {
let generic_params = generics(db.upcast(), *def);
// FIXME: this code is not covered in tests.
// we still need one default per parameter
- let defaults = Arc::from(
- generic_params
- .iter_id()
- .map(|id| {
- let val = match id {
- Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
- Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
- };
- crate::make_binders(db, &generic_params, val)
- })
- // FIXME: use `Arc::from_iter` when it becomes available
- .collect::<Vec<_>>(),
- );
+ let defaults = Arc::from_iter(generic_params.iter_id().map(|id| {
+ let val = match id {
+ Either::Left(_) => TyKind::Error.intern(Interner).cast(Interner),
+ Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+ };
+ crate::make_binders(db, &generic_params, val)
+ }));
defaults
}
@@ -1774,10 +1779,11 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
let resolver = t.resolver(db.upcast());
let ctx = TyLoweringContext::new(db, &resolver, t.into())
.with_type_param_mode(ParamLoweringMode::Variable);
- if db.type_alias_data(t).is_extern {
+ let type_alias_data = db.type_alias_data(t);
+ if type_alias_data.is_extern {
Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
} else {
- let type_ref = &db.type_alias_data(t).type_ref;
+ let type_ref = &type_alias_data.type_ref;
let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
make_binders(db, &generics, inner)
}
@@ -1866,7 +1872,7 @@ pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
}
}
-pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
+pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders<Ty> {
let generics = match *def {
TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
@@ -1916,7 +1922,7 @@ pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> T
pub(crate) fn impl_self_ty_recover(
db: &dyn HirDatabase,
- _cycle: &[String],
+ _cycle: &Cycle,
impl_id: &ImplId,
) -> Binders<Ty> {
let generics = generics(db.upcast(), (*impl_id).into());
@@ -2048,7 +2054,7 @@ pub(crate) fn const_or_path_to_chalk(
.intern_in_type_const(InTypeConstLoc {
id: it,
owner,
- thing: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
+ expected_ty: Box::new(InTypeConstIdMetadata(expected_ty.clone())),
})
.into();
intern_const_scalar(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index f3a5f69b2..041d61c1b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -27,8 +27,9 @@ use crate::{
primitive::{FloatTy, IntTy, UintTy},
static_lifetime, to_chalk_trait_id,
utils::all_super_traits,
- AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, InEnvironment,
- Interner, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, Goal, Guidance,
+ InEnvironment, Interner, Scalar, Solution, Substitution, TraitEnvironment, TraitRef,
+ TraitRefExt, Ty, TyBuilder, TyExt,
};
/// This is used as a key for indexing impls.
@@ -167,12 +168,9 @@ impl TraitImpls {
) -> Arc<[Arc<Self>]> {
let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
let crate_graph = db.crate_graph();
- // FIXME: use `Arc::from_iter` when it becomes available
- Arc::from(
- crate_graph
- .transitive_deps(krate)
- .map(|krate| db.trait_impls_in_crate(krate))
- .collect::<Vec<_>>(),
+
+ Arc::from_iter(
+ crate_graph.transitive_deps(krate).map(|krate| db.trait_impls_in_crate(krate)),
)
}
@@ -862,6 +860,62 @@ fn is_inherent_impl_coherent(
}
}
+/// Checks whether the impl satisfies the orphan rules.
+///
+/// Given `impl<P1..=Pn> Trait<T1..=Tn> for T0`, an `impl` is valid only if at least one of the following is true:
+/// - Trait is a local trait
+/// - All of
+///   - At least one of the types `T0..=Tn` must be a local type. Let `Ti` be the first such type.
+///   - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti` (excluding `Ti`)
+pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
+ let substs = TyBuilder::placeholder_subst(db, impl_);
+ let Some(impl_trait) = db.impl_trait(impl_) else {
+ // not a trait impl
+ return true;
+ };
+
+ let local_crate = impl_.lookup(db.upcast()).container.krate();
+ let is_local = |tgt_crate| tgt_crate == local_crate;
+
+ let trait_ref = impl_trait.substitute(Interner, &substs);
+ let trait_id = from_chalk_trait_id(trait_ref.trait_id);
+ if is_local(trait_id.module(db.upcast()).krate()) {
+ // trait to be implemented is local
+ return true;
+ }
+
+ let unwrap_fundamental = |ty: Ty| match ty.kind(Interner) {
+ TyKind::Ref(_, _, referenced) => referenced.clone(),
+ &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => {
+ let struct_data = db.struct_data(s);
+ if struct_data.flags.contains(StructFlags::IS_FUNDAMENTAL) {
+ let next = subs.type_parameters(Interner).next();
+ match next {
+ Some(ty) => ty,
+ None => ty,
+ }
+ } else {
+ ty
+ }
+ }
+ _ => ty,
+ };
+    // - At least one of the types `T0..=Tn` must be a local type. Let `Ti` be the first such type.
+ let is_not_orphan = trait_ref.substitution.type_parameters(Interner).any(|ty| {
+ match unwrap_fundamental(ty).kind(Interner) {
+ &TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()),
+ TyKind::Error => true,
+ TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
+ is_local(from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate())
+ }),
+ _ => false,
+ }
+ });
+ // FIXME: param coverage
+    // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti` (excluding `Ti`)
+ is_not_orphan
+}
+
pub fn iterate_path_candidates(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
@@ -1422,26 +1476,52 @@ fn is_valid_fn_candidate(
// We need to consider the bounds on the impl to distinguish functions of the same name
// for a type.
let predicates = db.generic_predicates(impl_id.into());
- let valid = predicates
- .iter()
- .map(|predicate| {
- let (p, b) = predicate
- .clone()
- .substitute(Interner, &impl_subst)
- // Skipping the inner binders is ok, as we don't handle quantified where
- // clauses yet.
- .into_value_and_skipped_binders();
- stdx::always!(b.len(Interner) == 0);
- p
- })
- // It's ok to get ambiguity here, as we may not have enough information to prove
- // obligations. We'll check if the user is calling the selected method properly
- // later anyway.
- .all(|p| table.try_obligation(p.cast(Interner)).is_some());
- match valid {
- true => IsValidCandidate::Yes,
- false => IsValidCandidate::No,
+ let goals = predicates.iter().map(|p| {
+ let (p, b) = p
+ .clone()
+ .substitute(Interner, &impl_subst)
+ // Skipping the inner binders is ok, as we don't handle quantified where
+ // clauses yet.
+ .into_value_and_skipped_binders();
+ stdx::always!(b.len(Interner) == 0);
+
+ p.cast::<Goal>(Interner)
+ });
+
+ for goal in goals.clone() {
+ let in_env = InEnvironment::new(&table.trait_env.env, goal);
+ let canonicalized = table.canonicalize(in_env);
+ let solution = table.db.trait_solve(
+ table.trait_env.krate,
+ table.trait_env.block,
+ canonicalized.value.clone(),
+ );
+
+ match solution {
+ Some(Solution::Unique(canonical_subst)) => {
+ canonicalized.apply_solution(
+ table,
+ Canonical {
+ binders: canonical_subst.binders,
+ value: canonical_subst.value.subst,
+ },
+ );
+ }
+ Some(Solution::Ambig(Guidance::Definite(substs))) => {
+ canonicalized.apply_solution(table, substs);
+ }
+ Some(_) => (),
+ None => return IsValidCandidate::No,
+ }
}
+
+ for goal in goals {
+ if table.try_obligation(goal).is_none() {
+ return IsValidCandidate::No;
+ }
+ }
+
+ IsValidCandidate::Yes
} else {
// For `ItemContainerId::TraitId`, we check if `self_ty` implements the trait in
// `iterate_trait_method_candidates()`.
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
index e953058cc..f1795e71d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -40,7 +40,6 @@ pub use monomorphization::{
use rustc_hash::FxHashMap;
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};
-use triomphe::Arc;
use super::consteval::{intern_const_scalar, try_const_usize};
@@ -147,7 +146,7 @@ impl<V, T> ProjectionElem<V, T> {
base = normalize(
db,
// FIXME: we should get this from caller
- Arc::new(TraitEnvironment::empty(krate)),
+ TraitEnvironment::empty(krate),
base,
);
}
@@ -243,16 +242,16 @@ impl Default for ProjectionStore {
}
impl ProjectionStore {
- fn shrink_to_fit(&mut self) {
+ pub fn shrink_to_fit(&mut self) {
self.id_to_proj.shrink_to_fit();
self.proj_to_id.shrink_to_fit();
}
- fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
+ pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
self.proj_to_id.get(projection).copied()
}
- fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
+ pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
let new_id = ProjectionId(self.proj_to_id.len() as u32);
match self.proj_to_id.entry(projection) {
Entry::Occupied(id) => *id.get(),
@@ -267,20 +266,24 @@ impl ProjectionStore {
}
impl ProjectionId {
- const EMPTY: ProjectionId = ProjectionId(0);
+ pub const EMPTY: ProjectionId = ProjectionId(0);
+
+ pub fn is_empty(self) -> bool {
+ self == ProjectionId::EMPTY
+ }
- fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
+ pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
store.id_to_proj.get(&self).unwrap()
}
- fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
+ pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
let mut current = self.lookup(store).to_vec();
current.push(projection);
store.intern(current.into())
}
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Place {
pub local: LocalId,
pub projection: ProjectionId,
@@ -1007,7 +1010,7 @@ pub enum Rvalue {
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum StatementKind {
Assign(Place, Rvalue),
- //FakeRead(Box<(FakeReadCause, Place)>),
+ FakeRead(Place),
//SetDiscriminant {
// place: Box<Place>,
// variant_index: VariantIdx,
@@ -1069,6 +1072,10 @@ pub struct MirBody {
}
impl MirBody {
+ pub fn local_to_binding_map(&self) -> ArenaMap<LocalId, BindingId> {
+ self.binding_locals.iter().map(|(it, y)| (*y, it)).collect()
+ }
+
fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) {
fn for_operand(
op: &mut Operand,
@@ -1109,7 +1116,9 @@ impl MirBody {
}
}
}
- StatementKind::Deinit(p) => f(p, &mut self.projection_store),
+ StatementKind::FakeRead(p) | StatementKind::Deinit(p) => {
+ f(p, &mut self.projection_store)
+ }
StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
@@ -1186,3 +1195,9 @@ pub enum MirSpan {
}
impl_from!(ExprId, PatId for MirSpan);
+
+impl From<&ExprId> for MirSpan {
+ fn from(value: &ExprId) -> Self {
+ (*value).into()
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
index 41fb12965..74c5efd6c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -24,6 +24,7 @@ use super::{
pub enum MutabilityReason {
Mut { spans: Vec<MirSpan> },
Not,
+ Unused,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -144,7 +145,8 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
}
}
},
- StatementKind::Deinit(_)
+ StatementKind::FakeRead(_)
+ | StatementKind::Deinit(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
@@ -264,7 +266,10 @@ fn ever_initialized_map(
is_ever_initialized = false;
}
}
- StatementKind::Deinit(_) | StatementKind::Nop | StatementKind::StorageLive(_) => (),
+ StatementKind::Deinit(_)
+ | StatementKind::FakeRead(_)
+ | StatementKind::Nop
+ | StatementKind::StorageLive(_) => (),
}
}
let Some(terminator) = &block.terminator else {
@@ -331,16 +336,37 @@ fn ever_initialized_map(
result
}
+fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap<LocalId, MutabilityReason>) {
+ match &mut result[local] {
+ MutabilityReason::Mut { spans } => spans.push(span),
+ it @ (MutabilityReason::Not | MutabilityReason::Unused) => {
+ *it = MutabilityReason::Mut { spans: vec![span] }
+ }
+ };
+}
+
+fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>) {
+ match &mut result[local] {
+ it @ MutabilityReason::Unused => {
+ *it = MutabilityReason::Not;
+ }
+ _ => (),
+ };
+}
+
+fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap<LocalId, MutabilityReason>) {
+ if let Operand::Copy(p) | Operand::Move(p) = arg {
+ record_usage(p.local, result);
+ }
+}
+
fn mutability_of_locals(
db: &dyn HirDatabase,
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let mut result: ArenaMap<LocalId, MutabilityReason> =
- body.locals.iter().map(|it| (it.0, MutabilityReason::Not)).collect();
- let mut push_mut_span = |local, span| match &mut result[local] {
- MutabilityReason::Mut { spans } => spans.push(span),
- it @ MutabilityReason::Not => *it = MutabilityReason::Mut { spans: vec![span] },
- };
+ body.locals.iter().map(|it| (it.0, MutabilityReason::Unused)).collect();
+
let ever_init_maps = ever_initialized_map(db, body);
for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
let block = &body.basic_blocks[block_id];
@@ -350,23 +376,51 @@ fn mutability_of_locals(
match place_case(db, body, place) {
ProjectionCase::Direct => {
if ever_init_map.get(place.local).copied().unwrap_or_default() {
- push_mut_span(place.local, statement.span);
+ push_mut_span(place.local, statement.span, &mut result);
} else {
ever_init_map.insert(place.local, true);
}
}
ProjectionCase::DirectPart => {
// Partial initialization is not supported, so it is definitely `mut`
- push_mut_span(place.local, statement.span);
+ push_mut_span(place.local, statement.span, &mut result);
+ }
+ ProjectionCase::Indirect => {
+ record_usage(place.local, &mut result);
}
- ProjectionCase::Indirect => (),
+ }
+ match value {
+ Rvalue::CopyForDeref(p)
+ | Rvalue::Discriminant(p)
+ | Rvalue::Len(p)
+ | Rvalue::Ref(_, p) => {
+ record_usage(p.local, &mut result);
+ }
+ Rvalue::Use(o)
+ | Rvalue::Repeat(o, _)
+ | Rvalue::Cast(_, o, _)
+ | Rvalue::UnaryOp(_, o) => record_usage_for_operand(o, &mut result),
+ Rvalue::CheckedBinaryOp(_, o1, o2) => {
+ for o in [o1, o2] {
+ record_usage_for_operand(o, &mut result);
+ }
+ }
+ Rvalue::Aggregate(_, args) => {
+ for arg in args.iter() {
+ record_usage_for_operand(arg, &mut result);
+ }
+ }
+ Rvalue::ShallowInitBox(_, _) | Rvalue::ShallowInitBoxWithAlloc(_) => (),
}
if let Rvalue::Ref(BorrowKind::Mut { .. }, p) = value {
if place_case(db, body, p) != ProjectionCase::Indirect {
- push_mut_span(p.local, statement.span);
+ push_mut_span(p.local, statement.span, &mut result);
}
}
}
+ StatementKind::FakeRead(p) => {
+ record_usage(p.local, &mut result);
+ }
StatementKind::StorageDead(p) => {
ever_init_map.insert(*p, false);
}
@@ -386,15 +440,21 @@ fn mutability_of_locals(
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::GeneratorDrop
- | TerminatorKind::SwitchInt { .. }
| TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. } => (),
- TerminatorKind::Call { destination, .. } => {
+ TerminatorKind::SwitchInt { discr, targets: _ } => {
+ record_usage_for_operand(discr, &mut result);
+ }
+ TerminatorKind::Call { destination, args, func, .. } => {
+ record_usage_for_operand(func, &mut result);
+ for arg in args.iter() {
+ record_usage_for_operand(arg, &mut result);
+ }
if destination.projection.lookup(&body.projection_store).len() == 0 {
if ever_init_map.get(destination.local).copied().unwrap_or_default() {
- push_mut_span(destination.local, MirSpan::Unknown);
+ push_mut_span(destination.local, MirSpan::Unknown, &mut result);
} else {
ever_init_map.insert(destination.local, true);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
index 4364e0d32..fbfb6ff8c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -21,7 +21,7 @@ use hir_def::{
AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
StaticId, VariantId,
};
-use hir_expand::{mod_path::ModPath, InFile};
+use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
use intern::Interned;
use la_arena::ArenaMap;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -162,7 +162,7 @@ pub struct Evaluator<'a> {
not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
/// Constantly dropping and creating `Locals` is very costly. We store
- /// old locals that we normaly want to drop here, to reuse their allocations
+ /// old locals that we normally want to drop here, to reuse their allocations
/// later.
unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
cached_ptr_size: usize,
@@ -375,10 +375,7 @@ impl MirEvalError {
Err(_) => continue,
},
MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|it| match it {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
+ Ok(s) => s.map(|it| it.syntax_node_ptr()),
Err(_) => continue,
},
MirSpan::Unknown => continue,
@@ -842,6 +839,7 @@ impl Evaluator<'_> {
}
StatementKind::Deinit(_) => not_supported!("de-init statement"),
StatementKind::StorageLive(_)
+ | StatementKind::FakeRead(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
@@ -2301,7 +2299,7 @@ impl Evaluator<'_> {
match self.get_mir_or_dyn_index(def, generic_args.clone(), locals, span)? {
MirOrDynIndex::Dyn(self_ty_idx) => {
// In the layout of current possible receiver, which at the moment of writing this code is one of
- // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible recievers,
+ // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible receivers,
// the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
// the type.
let first_arg = arg_bytes.clone().next().unwrap();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
index 803ef631f..2de99e416 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
@@ -1045,7 +1045,7 @@ impl Evaluator<'_> {
}
"transmute" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("trasmute arg is not provided"));
+ return Err(MirEvalError::TypeError("transmute arg is not provided"));
};
destination.write_from_interval(self, arg.interval)
}
@@ -1065,7 +1065,7 @@ impl Evaluator<'_> {
}
"ctlz" | "ctlz_nonzero" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("cttz arg is not provided"));
+ return Err(MirEvalError::TypeError("ctlz arg is not provided"));
};
let result =
u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
index dd2dba717..639fabc19 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -2,7 +2,7 @@
use std::{fmt::Write, iter, mem};
-use base_db::FileId;
+use base_db::{salsa::Cycle, FileId};
use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
use hir_def::{
body::Body,
@@ -105,9 +105,14 @@ pub enum MirLowerError {
/// A token to ensuring that each drop scope is popped at most once, thanks to the compiler that checks moves.
struct DropScopeToken;
impl DropScopeToken {
- fn pop_and_drop(self, ctx: &mut MirLowerCtx<'_>, current: BasicBlockId) -> BasicBlockId {
+ fn pop_and_drop(
+ self,
+ ctx: &mut MirLowerCtx<'_>,
+ current: BasicBlockId,
+ span: MirSpan,
+ ) -> BasicBlockId {
std::mem::forget(self);
- ctx.pop_drop_scope_internal(current)
+ ctx.pop_drop_scope_internal(current, span)
}
/// It is useful when we want a drop scope is syntaxically closed, but we don't want to execute any drop
@@ -529,6 +534,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
+ self.push_fake_read(current, cond_place, expr_id.into());
let (then_target, else_target) =
self.pattern_match(current, None, cond_place, *pat)?;
self.write_bytes_to_place(
@@ -581,7 +587,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
let scope = this.push_drop_scope();
if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
- current = scope.pop_and_drop(this, current);
+ current = scope.pop_and_drop(this, current, body.into());
this.set_goto(current, begin, expr_id.into());
} else {
scope.pop_assume_dropped(this);
@@ -668,6 +674,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
+ self.push_fake_read(current, cond_place, expr_id.into());
let mut end = None;
for MatchArm { pat, guard, expr } in arms.iter() {
let (then, mut otherwise) =
@@ -718,7 +725,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
.ok_or(MirLowerError::ContinueWithoutLoop)?,
};
let begin = loop_data.begin;
- current = self.drop_until_scope(loop_data.drop_scope_index, current);
+ current =
+ self.drop_until_scope(loop_data.drop_scope_index, current, expr_id.into());
self.set_goto(current, begin, expr_id.into());
Ok(None)
}
@@ -757,7 +765,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.current_loop_blocks.as_ref().unwrap().drop_scope_index,
),
};
- current = self.drop_until_scope(drop_scope, current);
+ current = self.drop_until_scope(drop_scope, current, expr_id.into());
self.set_goto(current, end, expr_id.into());
Ok(None)
}
@@ -771,7 +779,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
}
}
- current = self.drop_until_scope(0, current);
+ current = self.drop_until_scope(0, current, expr_id.into());
self.set_terminator(current, TerminatorKind::Return, expr_id.into());
Ok(None)
}
@@ -1299,6 +1307,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
if matches!(&self.body.exprs[lhs], Expr::Underscore) {
+ self.push_fake_read_for_operand(current, rhs_op, span);
return Ok(Some(current));
}
if matches!(
@@ -1575,6 +1584,16 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.result.basic_blocks[block].statements.push(statement);
}
+ fn push_fake_read(&mut self, block: BasicBlockId, p: Place, span: MirSpan) {
+ self.push_statement(block, StatementKind::FakeRead(p).with_span(span));
+ }
+
+ fn push_fake_read_for_operand(&mut self, block: BasicBlockId, operand: Operand, span: MirSpan) {
+ if let Operand::Move(p) | Operand::Copy(p) = operand {
+ self.push_fake_read(block, p, span);
+ }
+ }
+
fn push_assignment(
&mut self,
block: BasicBlockId,
@@ -1733,6 +1752,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
current = c;
+ self.push_fake_read(current, init_place, span);
(current, else_block) =
self.pattern_match(current, None, init_place, *pat)?;
match (else_block, else_branch) {
@@ -1760,14 +1780,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
}
- hir_def::hir::Statement::Expr { expr, has_semi: _ } => {
+ &hir_def::hir::Statement::Expr { expr, has_semi: _ } => {
let scope2 = self.push_drop_scope();
- let Some((_, c)) = self.lower_expr_as_place(current, *expr, true)? else {
+ let Some((p, c)) = self.lower_expr_as_place(current, expr, true)? else {
scope2.pop_assume_dropped(self);
scope.pop_assume_dropped(self);
return Ok(None);
};
- current = scope2.pop_and_drop(self, c);
+ self.push_fake_read(c, p, expr.into());
+ current = scope2.pop_and_drop(self, c, expr.into());
}
}
}
@@ -1778,7 +1799,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
current = c;
}
- current = scope.pop_and_drop(self, current);
+ current = scope.pop_and_drop(self, current, span);
Ok(Some(current))
}
@@ -1858,9 +1879,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
- fn drop_until_scope(&mut self, scope_index: usize, mut current: BasicBlockId) -> BasicBlockId {
+ fn drop_until_scope(
+ &mut self,
+ scope_index: usize,
+ mut current: BasicBlockId,
+ span: MirSpan,
+ ) -> BasicBlockId {
for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() {
- self.emit_drop_and_storage_dead_for_scope(scope, &mut current);
+ self.emit_drop_and_storage_dead_for_scope(scope, &mut current, span);
}
current
}
@@ -1876,17 +1902,22 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
/// Don't call directly
- fn pop_drop_scope_internal(&mut self, mut current: BasicBlockId) -> BasicBlockId {
+ fn pop_drop_scope_internal(
+ &mut self,
+ mut current: BasicBlockId,
+ span: MirSpan,
+ ) -> BasicBlockId {
let scope = self.drop_scopes.pop().unwrap();
- self.emit_drop_and_storage_dead_for_scope(&scope, &mut current);
+ self.emit_drop_and_storage_dead_for_scope(&scope, &mut current, span);
current
}
fn pop_drop_scope_assert_finished(
&mut self,
mut current: BasicBlockId,
+ span: MirSpan,
) -> Result<BasicBlockId> {
- current = self.pop_drop_scope_internal(current);
+ current = self.pop_drop_scope_internal(current, span);
if !self.drop_scopes.is_empty() {
implementation_error!("Mismatched count between drop scope push and pops");
}
@@ -1897,6 +1928,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
&mut self,
scope: &DropScope,
current: &mut Idx<BasicBlock>,
+ span: MirSpan,
) {
for &l in scope.locals.iter().rev() {
if !self.result.locals[l].ty.clone().is_copy(self.db, self.owner) {
@@ -1904,13 +1936,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.set_terminator(
prev,
TerminatorKind::Drop { place: l.into(), target: *current, unwind: None },
- MirSpan::Unknown,
+ span,
);
}
- self.push_statement(
- *current,
- StatementKind::StorageDead(l).with_span(MirSpan::Unknown),
- );
+ self.push_statement(*current, StatementKind::StorageDead(l).with_span(span));
}
}
}
@@ -1987,7 +2016,7 @@ pub fn mir_body_for_closure_query(
|_| true,
)?;
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
- let current = ctx.pop_drop_scope_assert_finished(current)?;
+ let current = ctx.pop_drop_scope_assert_finished(current, root.into())?;
ctx.set_terminator(current, TerminatorKind::Return, (*root).into());
}
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedItem, usize)>> = FxHashMap::default();
@@ -2081,7 +2110,7 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
pub fn mir_body_recover(
_db: &dyn HirDatabase,
- _cycle: &[String],
+ _cycle: &Cycle,
_def: &DefWithBodyId,
) -> Result<Arc<MirBody>> {
Err(MirLowerError::Loop)
@@ -2131,7 +2160,7 @@ pub fn lower_to_mir(
ctx.lower_params_and_bindings([].into_iter(), binding_picker)?
};
if let Some(current) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
- let current = ctx.pop_drop_scope_assert_finished(current)?;
+ let current = ctx.pop_drop_scope_assert_finished(current, root_expr.into())?;
ctx.set_terminator(current, TerminatorKind::Return, root_expr.into());
}
Ok(ctx.result)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
index 270f75ad9..1120bb1c1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -284,6 +284,7 @@ impl MirLowerCtx<'_> {
);
(current, current_else) = self.pattern_match_binding(
id,
+ *slice,
next_place,
(*slice).into(),
current,
@@ -395,6 +396,7 @@ impl MirLowerCtx<'_> {
if mode == MatchingMode::Bind {
self.pattern_match_binding(
*id,
+ pattern,
cond_place,
pattern.into(),
current,
@@ -431,13 +433,14 @@ impl MirLowerCtx<'_> {
fn pattern_match_binding(
&mut self,
id: BindingId,
+ pat: PatId,
cond_place: Place,
span: MirSpan,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let target_place = self.binding_local(id)?;
- let mode = self.infer.binding_modes[id];
+ let mode = self.infer.binding_modes[pat];
self.push_storage_live(id, current)?;
self.push_assignment(
current,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
index df16d0d82..8da03eef2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
@@ -9,6 +9,7 @@
use std::mem;
+use base_db::salsa::Cycle;
use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
ConstData, DebruijnIndex,
@@ -248,6 +249,7 @@ impl Filler<'_> {
| Rvalue::CopyForDeref(_) => (),
},
StatementKind::Deinit(_)
+ | StatementKind::FakeRead(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
@@ -299,7 +301,7 @@ pub fn monomorphized_mir_body_query(
pub fn monomorphized_mir_body_recover(
_: &dyn HirDatabase,
- _: &[String],
+ _: &Cycle,
_: &DefWithBodyId,
_: &Substitution,
_: &Arc<crate::TraitEnvironment>,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
index 0108859ff..a91f90bc2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
@@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> {
let indent = mem::take(&mut self.indent);
let mut ctx = MirPrettyCtx {
body: &body,
- local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(),
+ local_to_binding: body.local_to_binding_map(),
result,
indent,
..*self
@@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> {
}
fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
- let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect();
+ let local_to_binding = body.local_to_binding_map();
MirPrettyCtx {
body,
db,
@@ -233,6 +233,11 @@ impl<'a> MirPrettyCtx<'a> {
this.place(p);
wln!(this, ");");
}
+ StatementKind::FakeRead(p) => {
+ w!(this, "FakeRead(");
+ this.place(p);
+ wln!(this, ");");
+ }
StatementKind::Nop => wln!(this, "Nop;"),
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
index 7d19e0a19..6f4aef22d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -30,6 +30,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
+ this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index d22d0d85c..1446e83fa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -269,12 +269,7 @@ fn pat_node(
Some(match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
- sp.map(|ptr| {
- ptr.either(
- |it| it.to_node(&root).syntax().clone(),
- |it| it.to_node(&root).syntax().clone(),
- )
- })
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => return None,
})
@@ -303,12 +298,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let syntax_ptr = match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
- sp.map(|ptr| {
- ptr.either(
- |it| it.to_node(&root).syntax().clone(),
- |it| it.to_node(&root).syntax().clone(),
- )
- })
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => continue,
};
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
index bb15ca8c4..28e84e480 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -9,11 +9,10 @@ use super::visit_module;
fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let (mut db, pos) = TestDB::with_position(
"
- //- /lib.rs
- fn foo() -> i32 {
- $01 + 1
- }
- ",
+//- /lib.rs
+fn foo() -> i32 {
+ $01 + 1
+}",
);
{
let events = db.log_executed(|| {
@@ -27,12 +26,11 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
}
let new_text = "
- fn foo() -> i32 {
- 1
- +
- 1
- }
- ";
+fn foo() -> i32 {
+ 1
+ +
+ 1
+}";
db.set_file_text(pos.file_id, Arc::from(new_text));
@@ -47,3 +45,55 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
}
}
+
+#[test]
+fn typing_inside_a_function_should_not_invalidate_types_in_another() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+//- /lib.rs
+fn foo() -> f32 {
+ 1.0 + 2.0
+}
+fn bar() -> i32 {
+ $01 + 1
+}
+fn baz() -> i32 {
+ 1 + 1
+}",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{events:?}").contains("infer"))
+ }
+
+ let new_text = "
+fn foo() -> f32 {
+ 1.0 + 2.0
+}
+fn bar() -> i32 {
+ 53
+}
+fn baz() -> i32 {
+ 1 + 1
+}
+";
+
+ db.set_file_text(pos.file_id, Arc::from(new_text));
+
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = module.def_map(&db);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
index 1e6e946a1..d16e0eb01 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
@@ -684,8 +684,7 @@ fn infer_builtin_macros_line() {
}
"#,
expect![[r#"
- !0..1 '0': i32
- !0..6 '0asu32': u32
+ !0..4 '0u32': u32
63..87 '{ ...!(); }': ()
73..74 'x': u32
"#]],
@@ -723,8 +722,7 @@ fn infer_builtin_macros_column() {
}
"#,
expect![[r#"
- !0..1 '0': i32
- !0..6 '0asu32': u32
+ !0..4 '0u32': u32
65..91 '{ ...!(); }': ()
75..76 'x': u32
"#]],
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
index 0f5a3e175..7234af2d6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
@@ -1129,3 +1129,65 @@ fn foo() {
"#,
);
}
+
+#[test]
+fn generic_alias() {
+ check_types(
+ r#"
+type Wrap<T> = T;
+
+enum X {
+ A { cool: u32, stuff: u32 },
+ B,
+}
+
+fn main() {
+ let wrapped = Wrap::<X>::A {
+ cool: 100,
+ stuff: 100,
+ };
+
+ if let Wrap::<X>::A { cool, ..} = &wrapped {}
+ //^^^^ &u32
+}
+"#,
+ );
+}
+
+#[test]
+fn type_mismatch_pat_const_reference() {
+ check_no_mismatches(
+ r#"
+const TEST_STR: &'static str = "abcd";
+
+fn main() {
+ let s = "abcd";
+ match s {
+ TEST_STR => (),
+ _ => (),
+ }
+}
+
+ "#,
+ );
+ check(
+ r#"
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+ const TEST_I32_REF: &'static i32 = &3;
+ const TEST_I32: i32 = 3;
+}
+
+fn main() {
+ match &6 {
+ Foo::<i32>::TEST_I32_REF => (),
+ Foo::<i32>::TEST_I32 => (),
+ //^^^^^^^^^^^^^^^^^^^^ expected &i32, got i32
+ _ => (),
+ }
+}
+
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index 6ea059065..35079e709 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -2000,3 +2000,15 @@ fn test() {
"#,
);
}
+
+#[test]
+fn rustc_test_issue_52437() {
+ check_types(
+ r#"
+ fn main() {
+ let x = [(); &(&'static: loop { |x| {}; }) as *const _ as usize]
+ //^ [(); _]
+ }
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index d36b885ec..003ae60e8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -2598,6 +2598,34 @@ fn test<T: Trait>() {
}
#[test]
+fn associated_type_in_type_bound() {
+ check_types(
+ r#"
+//- minicore: deref
+fn fb(f: Foo<&u8>) {
+ f.foobar();
+ //^^^^^^^^^^ u8
+}
+trait Bar {
+ fn bar(&self) -> u8;
+}
+impl Bar for u8 {
+ fn bar(&self) -> u8 { *self }
+}
+
+struct Foo<F> {
+ foo: F,
+}
+impl<F: core::ops::Deref<Target = impl Bar>> Foo<F> {
+ fn foobar(&self) -> u8 {
+ self.foo.deref().bar()
+ }
+}
+"#,
+ )
+}
+
+#[test]
fn dyn_trait_through_chalk() {
check_types(
r#"
@@ -4439,42 +4467,42 @@ fn test(v: S<i32>) {
fn associated_type_in_argument() {
check(
r#"
- trait A {
- fn m(&self) -> i32;
- }
+trait A {
+ fn m(&self) -> i32;
+}
- fn x<T: B>(k: &<T as B>::Ty) {
- k.m();
- }
+fn x<T: B>(k: &<T as B>::Ty) {
+ k.m();
+}
- struct X;
- struct Y;
+struct X;
+struct Y;
- impl A for X {
- fn m(&self) -> i32 {
- 8
- }
+impl A for X {
+ fn m(&self) -> i32 {
+ 8
}
+}
- impl A for Y {
- fn m(&self) -> i32 {
- 32
- }
+impl A for Y {
+ fn m(&self) -> i32 {
+ 32
}
+}
- trait B {
- type Ty: A;
- }
+trait B {
+ type Ty: A;
+}
- impl B for u16 {
- type Ty = X;
- }
+impl B for u16 {
+ type Ty = X;
+}
- fn ttt() {
- let inp = Y;
- x::<u16>(&inp);
- //^^^^ expected &X, got &Y
- }
- "#,
+fn ttt() {
+ let inp = Y;
+ x::<u16>(&inp);
+ //^^^^ expected &X, got &Y
+}
+"#,
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
index 3c7cfbaed..b6bc76bc9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -48,18 +48,32 @@ pub struct TraitEnvironment {
pub krate: CrateId,
pub block: Option<BlockId>,
// FIXME make this a BTreeMap
- pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>,
+ traits_from_clauses: Box<[(Ty, TraitId)]>,
pub env: chalk_ir::Environment<Interner>,
}
impl TraitEnvironment {
- pub fn empty(krate: CrateId) -> Self {
- TraitEnvironment {
+ pub fn empty(krate: CrateId) -> Arc<Self> {
+ Arc::new(TraitEnvironment {
krate,
block: None,
- traits_from_clauses: Vec::new(),
+ traits_from_clauses: Box::default(),
env: chalk_ir::Environment::new(Interner),
- }
+ })
+ }
+
+ pub fn new(
+ krate: CrateId,
+ block: Option<BlockId>,
+ traits_from_clauses: Box<[(Ty, TraitId)]>,
+ env: chalk_ir::Environment<Interner>,
+ ) -> Arc<Self> {
+ Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env })
+ }
+
+ // pub fn with_block(self: &mut Arc<Self>, block: BlockId) {
+ pub fn with_block(this: &mut Arc<Self>, block: BlockId) {
+ Arc::make_mut(this).block = Some(block);
}
pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator<Item = TraitId> + '_ {
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
index f860ee948..4c1dfbc29 100644
--- a/src/tools/rust-analyzer/crates/hir/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -13,9 +13,9 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
-either = "1.7.0"
+either.workspace = true
arrayvec = "0.7.2"
-itertools = "0.10.5"
+itertools.workspace = true
smallvec.workspace = true
triomphe.workspace = true
once_cell = "1.17.1"
@@ -30,3 +30,6 @@ profile.workspace = true
stdx.workspace = true
syntax.workspace = true
tt.workspace = true
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index 796490abd..185853353 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -1,5 +1,6 @@
//! Attributes & documentation for hir types.
+use base_db::FileId;
use hir_def::{
attr::AttrsWithOwner,
item_scope::ItemInNs,
@@ -8,7 +9,10 @@ use hir_def::{
resolver::{HasResolver, Resolver, TypeNs},
AssocItemId, AttrDefId, ModuleDefId,
};
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::{
+ name::Name,
+ span::{RealSpanMap, SpanMapRef},
+};
use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode};
@@ -234,7 +238,11 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
if ast_path.syntax().text() != link {
return None;
}
- ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
+ ModPath::from_src(
+ db.upcast(),
+ ast_path,
+ SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::BOGUS)),
+ )
};
let full = try_get_modpath(link);
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
index 936581bfe..d98e3decd 100644
--- a/src/tools/rust-analyzer/crates/hir/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -3,10 +3,27 @@
//! we didn't do that.
//!
//! But we need this for at least LRU caching at the query level.
-pub use hir_def::db::*;
+pub use hir_def::db::{
+ AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery,
+ ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery,
+ CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
+ EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
+ FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
+ FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery,
+ ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
+ InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
+ InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
+ InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
+ InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
+ InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery,
+ Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery,
+ StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery,
+ TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery,
+ UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery,
+};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
- ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
- MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
+ ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
+ ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
};
pub use hir_ty::db::*;
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index 479138b67..1cb36f9b0 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -3,7 +3,7 @@
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
-pub use hir_ty::diagnostics::{CaseType, IncoherentImpl, IncorrectCase};
+pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
@@ -12,7 +12,7 @@ use hir_def::path::ModPath;
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
-use crate::{AssocItem, Field, Local, MacroKind, Type};
+use crate::{AssocItem, Field, Local, MacroKind, Trait, Type};
macro_rules! diagnostics {
($($diag:ident,)*) => {
@@ -53,6 +53,10 @@ diagnostics![
PrivateAssocItem,
PrivateField,
ReplaceFilterMapNextWithFindMap,
+ TraitImplIncorrectSafety,
+ TraitImplMissingAssocItems,
+ TraitImplRedundantAssocItems,
+ TraitImplOrphan,
TypedHole,
TypeMismatch,
UndeclaredLabel,
@@ -66,6 +70,7 @@ diagnostics![
UnresolvedModule,
UnresolvedProcMacro,
UnusedMut,
+ UnusedVariable,
];
#[derive(Debug)]
@@ -173,20 +178,19 @@ pub struct MalformedDerive {
#[derive(Debug)]
pub struct NoSuchField {
- pub field: InFile<Either<AstPtr<ast::RecordExprField>, AstPtr<ast::RecordPatField>>>,
+ pub field: InFile<AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>>,
pub private: bool,
}
#[derive(Debug)]
pub struct PrivateAssocItem {
- pub expr_or_pat:
- InFile<Either<AstPtr<ast::Expr>, Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, Either<ast::Pat, ast::SelfParam>>>>,
pub item: AssocItem,
}
#[derive(Debug)]
pub struct MismatchedTupleStructPatArgCount {
- pub expr_or_pat: InFile<Either<AstPtr<ast::Expr>, AstPtr<ast::Pat>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expected: usize,
pub found: usize,
}
@@ -227,7 +231,7 @@ pub struct MissingUnsafe {
#[derive(Debug)]
pub struct MissingFields {
pub file: HirFileId,
- pub field_list_parent: Either<AstPtr<ast::RecordExpr>, AstPtr<ast::RecordPat>>,
+ pub field_list_parent: AstPtr<Either<ast::RecordExpr, ast::RecordPat>>,
pub field_list_parent_path: Option<AstPtr<ast::Path>>,
pub missed_fields: Vec<Name>,
}
@@ -254,7 +258,7 @@ pub struct MissingMatchArms {
#[derive(Debug)]
pub struct TypeMismatch {
- pub expr_or_pat: Either<InFile<AstPtr<ast::Expr>>, InFile<AstPtr<ast::Pat>>>,
+ pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expected: Type,
pub actual: Type,
}
@@ -271,7 +275,47 @@ pub struct UnusedMut {
}
#[derive(Debug)]
+pub struct UnusedVariable {
+ pub local: Local,
+}
+
+#[derive(Debug)]
pub struct MovedOutOfRef {
pub ty: Type,
pub span: InFile<SyntaxNodePtr>,
}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct IncoherentImpl {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplOrphan {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+}
+
+// FIXME: Split this off into the corresponding 4 rustc errors
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplIncorrectSafety {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+ pub should_be_safe: bool,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplMissingAssocItems {
+ pub file_id: HirFileId,
+ pub impl_: AstPtr<ast::Impl>,
+ pub missing: Vec<(Name, AssocItem)>,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct TraitImplRedundantAssocItems {
+ pub file_id: HirFileId,
+ pub trait_: Trait,
+ pub impl_: AstPtr<ast::Impl>,
+ pub assoc_item: (Name, AssocItem),
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index ac171026d..5847c8a9f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -1,6 +1,6 @@
//! HirDisplay implementations for various hir types.
use hir_def::{
- data::adt::VariantData,
+ data::adt::{StructKind, VariantData},
generics::{
TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
},
@@ -163,7 +163,40 @@ impl HirDisplay for Struct {
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
write_generic_params(def_id, f)?;
+
+ let variant_data = self.variant_data(f.db);
+ if let StructKind::Tuple = variant_data.kind() {
+ f.write_char('(')?;
+ let mut it = variant_data.fields().iter().peekable();
+
+ while let Some((id, _)) = it.next() {
+ let field = Field { parent: (*self).into(), id };
+ field.ty(f.db).hir_fmt(f)?;
+ if it.peek().is_some() {
+ f.write_str(", ")?;
+ }
+ }
+
+ f.write_str(");")?;
+ }
+
write_where_clause(def_id, f)?;
+
+ if let StructKind::Record = variant_data.kind() {
+ let fields = self.fields(f.db);
+ if fields.is_empty() {
+ f.write_str(" {}")?;
+ } else {
+ f.write_str(" {\n")?;
+ for field in self.fields(f.db) {
+ f.write_str(" ")?;
+ field.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+ }
+
Ok(())
}
}
@@ -176,6 +209,18 @@ impl HirDisplay for Enum {
let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
write_generic_params(def_id, f)?;
write_where_clause(def_id, f)?;
+
+ let variants = self.variants(f.db);
+ if !variants.is_empty() {
+ f.write_str(" {\n")?;
+ for variant in variants {
+ f.write_str(" ")?;
+ variant.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+
Ok(())
}
}
@@ -188,6 +233,18 @@ impl HirDisplay for Union {
let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
write_generic_params(def_id, f)?;
write_where_clause(def_id, f)?;
+
+ let fields = self.fields(f.db);
+ if !fields.is_empty() {
+ f.write_str(" {\n")?;
+ for field in self.fields(f.db) {
+ f.write_str(" ")?;
+ field.hir_fmt(f)?;
+ f.write_str(",\n")?;
+ }
+ f.write_str("}")?;
+ }
+
Ok(())
}
}
@@ -559,7 +616,7 @@ impl HirDisplay for TypeAlias {
write_where_clause(def_id, f)?;
if !data.bounds.is_empty() {
f.write_str(": ")?;
- f.write_joined(&data.bounds, " + ")?;
+ f.write_joined(data.bounds.iter(), " + ")?;
}
if let Some(ty) = &data.type_ref {
f.write_str(" = ")?;
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index b215ed38f..e0230fa37 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -17,7 +17,8 @@
//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "512"]
mod semantics;
@@ -33,7 +34,7 @@ pub mod symbols;
mod display;
-use std::{iter, ops::ControlFlow};
+use std::{iter, mem::discriminant, ops::ControlFlow};
use arrayvec::ArrayVec;
use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
@@ -53,20 +54,20 @@ use hir_def::{
resolver::{HasResolver, Resolver},
src::HasSource as _,
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
- EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, HasModule, ImplId,
- InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup,
- MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
- TypeOrConstParamId, TypeParamId, UnionId,
+ EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
+ ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId,
+ Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
-use hir_expand::{name::name, MacroCallKind};
+use hir_expand::{attrs::collect_attrs, name::name, MacroCallKind};
use hir_ty::{
- all_super_traits, autoderef,
+ all_super_traits, autoderef, check_orphan_rules,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
known_const_to_ast,
- layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding},
+ layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
- mir::{self, interpret_mir},
+ mir::interpret_mir,
primitive::UintTy,
traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
@@ -80,7 +81,7 @@ use once_cell::unsync::Lazy;
use rustc_hash::FxHashSet;
use stdx::{impl_from, never};
use syntax::{
- ast::{self, HasAttrs as _, HasDocComments, HasName},
+ ast::{self, HasAttrs as _, HasName},
AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, T,
};
use triomphe::Arc;
@@ -89,19 +90,11 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
attrs::{resolve_doc_path_on, HasAttrs},
- diagnostics::{
- AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
- IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
- MacroExpansionParseError, MalformedDerive, MismatchedArgCount,
- MismatchedTupleStructPatArgCount, MissingFields, MissingMatchArms, MissingUnsafe,
- MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
- ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
- UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
- UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
- UnresolvedProcMacro, UnusedMut,
- },
+ diagnostics::*,
has_source::HasSource,
- semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
+ semantics::{
+ DescendPreference, PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits,
+ },
};
// Be careful with these re-exports.
@@ -132,15 +125,18 @@ pub use {
},
hir_expand::{
attrs::{Attr, AttrId},
+ hygiene::{marks_rev, SyntaxContextExt},
name::{known, Name},
- ExpandResult, HirFileId, InFile, MacroFile, Origin,
+ tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
+ MacroFileIdExt,
},
hir_ty::{
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError,
- mir::MirEvalError,
PointerCast, Safety,
},
+ // FIXME: Properly encapsulate mir
+ hir_ty::{mir, Interner as ChalkTyInterner},
};
// These are negative re-exports: pub using these names is forbidden, they
@@ -148,7 +144,10 @@ pub use {
#[allow(unused)]
use {
hir_def::path::Path,
- hir_expand::{hygiene::Hygiene, name::AsName},
+ hir_expand::{
+ name::AsName,
+ span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
+ },
};
/// hir::Crate describes a single crate. It's the main interface with which
@@ -452,15 +451,7 @@ impl HasVisibility for ModuleDef {
impl Module {
/// Name of this module.
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
- let def_map = self.id.def_map(db.upcast());
- let parent = def_map[self.id.local_id].parent?;
- def_map[parent].children.iter().find_map(|(name, module_id)| {
- if *module_id == self.id.local_id {
- Some(name.clone())
- } else {
- None
- }
- })
+ self.id.name(db.upcast())
}
/// Returns the crate this module is part of.
@@ -571,6 +562,7 @@ impl Module {
if def_map[m.id.local_id].origin.is_inline() {
m.diagnostics(db, acc)
}
+ acc.extend(def.diagnostics(db))
}
ModuleDef::Trait(t) => {
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
@@ -610,29 +602,141 @@ impl Module {
let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
+ let mut impl_assoc_items_scratch = vec![];
for impl_def in self.impl_defs(db) {
let loc = impl_def.id.lookup(db.upcast());
let tree = loc.id.item_tree(db.upcast());
let node = &tree[loc.id.value];
let file_id = loc.id.file_id();
- if file_id.is_builtin_derive(db.upcast()) {
+ if file_id.macro_file().map_or(false, |it| it.is_builtin_derive(db.upcast())) {
// these expansion come from us, diagnosing them is a waste of resources
// FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
continue;
}
+ let ast_id_map = db.ast_id_map(file_id);
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
- let ast_id_map = db.ast_id_map(file_id);
-
acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
}
- for item in impl_def.items(db) {
- let def: DefWithBody = match item {
+ if !impl_def.check_orphan_rules(db) {
+ acc.push(TraitImplOrphan { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
+ }
+
+ let trait_ = impl_def.trait_(db);
+ let trait_is_unsafe = trait_.map_or(false, |t| t.is_unsafe(db));
+ let impl_is_negative = impl_def.is_negative(db);
+ let impl_is_unsafe = impl_def.is_unsafe(db);
+
+ let drop_maybe_dangle = (|| {
+ // FIXME: This can be simplified a lot by exposing hir-ty's utils.rs::Generics helper
+ let trait_ = trait_?;
+ let drop_trait = db.lang_item(self.krate().into(), LangItem::Drop)?.as_trait()?;
+ if drop_trait != trait_.into() {
+ return None;
+ }
+ let parent = impl_def.id.into();
+ let generic_params = db.generic_params(parent);
+ let lifetime_params = generic_params.lifetimes.iter().map(|(local_id, _)| {
+ GenericParamId::LifetimeParamId(LifetimeParamId { parent, local_id })
+ });
+ let type_params = generic_params
+ .iter()
+ .filter(|(_, it)| it.type_param().is_some())
+ .map(|(local_id, _)| {
+ GenericParamId::TypeParamId(TypeParamId::from_unchecked(
+ TypeOrConstParamId { parent, local_id },
+ ))
+ });
+ let res = type_params
+ .chain(lifetime_params)
+ .any(|p| db.attrs(AttrDefId::GenericParamId(p)).by_key("may_dangle").exists());
+ Some(res)
+ })()
+ .unwrap_or(false);
+
+ match (impl_is_unsafe, trait_is_unsafe, impl_is_negative, drop_maybe_dangle) {
+ // unsafe negative impl
+ (true, _, true, _) |
+ // unsafe impl for safe trait
+ (true, false, _, false) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: true }.into()),
+ // safe impl for unsafe trait
+ (false, true, false, _) |
+ // safe impl of dangling drop
+ (false, false, _, true) => acc.push(TraitImplIncorrectSafety { impl_: ast_id_map.get(node.ast_id()), file_id, should_be_safe: false }.into()),
+ _ => (),
+ };
+
+ // Negative impls can't have items, don't emit missing items diagnostic for them
+ if let (false, Some(trait_)) = (impl_is_negative, trait_) {
+ let items = &db.trait_data(trait_.into()).items;
+ let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
+ AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
+ AssocItemId::ConstId(id) => Const::from(id).value(db).is_none(),
+ AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
+ });
+ impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().filter_map(
+ |&item| {
+ Some((
+ item,
+ match item {
+ AssocItemId::FunctionId(it) => db.function_data(it).name.clone(),
+ AssocItemId::ConstId(it) => {
+ db.const_data(it).name.as_ref()?.clone()
+ }
+ AssocItemId::TypeAliasId(it) => db.type_alias_data(it).name.clone(),
+ },
+ ))
+ },
+ ));
+
+ let redundant = impl_assoc_items_scratch
+ .iter()
+ .filter(|(id, name)| {
+ !items.iter().any(|(impl_name, impl_item)| {
+ discriminant(impl_item) == discriminant(id) && impl_name == name
+ })
+ })
+ .map(|(item, name)| (name.clone(), AssocItem::from(*item)));
+ for (name, assoc_item) in redundant {
+ acc.push(
+ TraitImplRedundantAssocItems {
+ trait_,
+ file_id,
+ impl_: ast_id_map.get(node.ast_id()),
+ assoc_item: (name, assoc_item),
+ }
+ .into(),
+ )
+ }
+
+ let missing: Vec<_> = required_items
+ .filter(|(name, id)| {
+ !impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| {
+ discriminant(impl_item) == discriminant(id) && impl_name == name
+ })
+ })
+ .map(|(name, item)| (name.clone(), AssocItem::from(*item)))
+ .collect();
+ if !missing.is_empty() {
+ acc.push(
+ TraitImplMissingAssocItems {
+ impl_: ast_id_map.get(node.ast_id()),
+ file_id,
+ missing,
+ }
+ .into(),
+ )
+ }
+ impl_assoc_items_scratch.clear();
+ }
+
+ for &item in &db.impl_data(impl_def.id).items {
+ let def: DefWithBody = match AssocItem::from(item) {
AssocItem::Function(it) => it.into(),
AssocItem::Const(it) => it.into(),
AssocItem::TypeAlias(_) => continue,
@@ -671,8 +775,15 @@ impl Module {
db: &dyn DefDatabase,
item: impl Into<ItemInNs>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
- hir_def::find_path::find_path(db, item.into().into(), self.into(), prefer_no_std)
+ hir_def::find_path::find_path(
+ db,
+ item.into().into(),
+ self.into(),
+ prefer_no_std,
+ prefer_prelude,
+ )
}
/// Finds a path that can be used to refer to the given item from within
@@ -683,6 +794,7 @@ impl Module {
item: impl Into<ItemInNs>,
prefix_kind: PrefixKind,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
hir_def::find_path::find_path_prefixed(
db,
@@ -690,6 +802,7 @@ impl Module {
self.into(),
prefix_kind,
prefer_no_std,
+ prefer_prelude,
)
}
}
@@ -862,10 +975,9 @@ fn precise_macro_call_location(
// Compute the precise location of the macro name's token in the derive
// list.
let token = (|| {
- let derive_attr = node
- .doc_comments_and_attrs()
+ let derive_attr = collect_attrs(&node)
.nth(derive_attr_index.ast_index())
- .and_then(Either::left)?;
+ .and_then(|x| Either::left(x.1))?;
let token_tree = derive_attr.meta()?.token_tree()?;
let group_by = token_tree
.syntax()
@@ -890,10 +1002,9 @@ fn precise_macro_call_location(
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let node = ast_id.to_node(db.upcast());
- let attr = node
- .doc_comments_and_attrs()
+ let attr = collect_attrs(&node)
.nth(invoc_attr_index.ast_index())
- .and_then(Either::left)
+ .and_then(|x| Either::left(x.1))
.unwrap_or_else(|| {
panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
});
@@ -1453,9 +1564,7 @@ impl DefWithBody {
let (body, source_map) = db.body_with_source_map(self.into());
for (_, def_map) in body.blocks(db.upcast()) {
- for diag in def_map.diagnostics() {
- emit_def_diagnostic(db, acc, diag);
- }
+ Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc);
}
for diag in source_map.diagnostics() {
@@ -1509,10 +1618,10 @@ impl DefWithBody {
&hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
let expr_or_pat = match expr {
ExprOrPatId::ExprId(expr) => {
- source_map.field_syntax(expr).map(Either::Left)
+ source_map.field_syntax(expr).map(AstPtr::wrap_left)
}
ExprOrPatId::PatId(pat) => {
- source_map.pat_field_syntax(pat).map(Either::Right)
+ source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
}
};
acc.push(NoSuchField { field: expr_or_pat, private }.into())
@@ -1530,8 +1639,8 @@ impl DefWithBody {
}
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
- ExprOrPatId::PatId(pat) => pat_syntax(pat).map(Either::Right),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
let item = item.into();
acc.push(PrivateAssocItem { expr_or_pat, item }.into())
@@ -1609,12 +1718,17 @@ impl DefWithBody {
found,
} => {
let expr_or_pat = match pat {
- ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
- ExprOrPatId::PatId(pat) => source_map
- .pat_syntax(pat)
- .expect("unexpected synthetic")
- .map(|it| it.unwrap_left())
- .map(Either::Right),
+ ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
+ ExprOrPatId::PatId(pat) => {
+ let InFile { file_id, value } =
+ source_map.pat_syntax(pat).expect("unexpected synthetic");
+
+ // cast from Either<Pat, SelfParam> -> Either<_, Pat>
+ let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
+ continue;
+ };
+ InFile { file_id, value: ptr }
+ }
};
acc.push(
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
@@ -1628,11 +1742,15 @@ impl DefWithBody {
ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right),
};
let expr_or_pat = match expr_or_pat {
- Ok(Either::Left(expr)) => Either::Left(expr),
- Ok(Either::Right(InFile { file_id, value: Either::Left(pat) })) => {
- Either::Right(InFile { file_id, value: pat })
+ Ok(Either::Left(expr)) => expr.map(AstPtr::wrap_left),
+ Ok(Either::Right(InFile { file_id, value: pat })) => {
+ // cast from Either<Pat, SelfParam> -> Either<_, Pat>
+ let Some(ptr) = AstPtr::try_from_raw(pat.syntax_node_ptr()) else {
+ continue;
+ };
+ InFile { file_id, value: ptr }
}
- Ok(Either::Right(_)) | Err(SyntheticSyntax) => continue,
+ Err(SyntheticSyntax) => continue,
};
acc.push(
@@ -1667,10 +1785,7 @@ impl DefWithBody {
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(p) {
- Ok(s) => s.map(|it| match it {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::Unknown => continue,
@@ -1697,9 +1812,20 @@ impl DefWithBody {
// Skip synthetic bindings
continue;
}
- let need_mut = &mol[local];
+ let mut need_mut = &mol[local];
+ if body[binding_id].name.as_str() == Some("self")
+ && need_mut == &mir::MutabilityReason::Unused
+ {
+ need_mut = &mir::MutabilityReason::Not;
+ }
let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) {
+ (mir::MutabilityReason::Unused, _) => {
+ let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
+ if !should_ignore {
+ acc.push(UnusedVariable { local }.into())
+ }
+ }
(mir::MutabilityReason::Mut { .. }, true)
| (mir::MutabilityReason::Not, false) => (),
(mir::MutabilityReason::Mut { spans }, false) => {
@@ -1710,10 +1836,7 @@ impl DefWithBody {
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|it| match it {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::Unknown => continue,
@@ -1752,18 +1875,18 @@ impl DefWithBody {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordExpr(record_expr) =
- &source_ptr.value.to_node(&root)
+ source_ptr.value.to_node(&root)
{
if record_expr.record_expr_field_list().is_some() {
+ let field_list_parent_path =
+ record_expr.path().map(|path| AstPtr::new(&path));
acc.push(
MissingFields {
file: source_ptr.file_id,
- field_list_parent: Either::Left(AstPtr::new(
+ field_list_parent: AstPtr::new(&Either::Left(
record_expr,
)),
- field_list_parent_path: record_expr
- .path()
- .map(|path| AstPtr::new(&path)),
+ field_list_parent_path,
missed_fields,
}
.into(),
@@ -1775,24 +1898,24 @@ impl DefWithBody {
},
Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
Ok(source_ptr) => {
- if let Some(expr) = source_ptr.value.as_ref().left() {
+ if let Some(ptr) = source_ptr.value.clone().cast::<ast::RecordPat>()
+ {
let root = source_ptr.file_syntax(db.upcast());
- if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
- if record_pat.record_pat_field_list().is_some() {
- acc.push(
- MissingFields {
- file: source_ptr.file_id,
- field_list_parent: Either::Right(AstPtr::new(
- &record_pat,
- )),
- field_list_parent_path: record_pat
- .path()
- .map(|path| AstPtr::new(&path)),
- missed_fields,
- }
- .into(),
- )
- }
+ let record_pat = ptr.to_node(&root);
+ if record_pat.record_pat_field_list().is_some() {
+ let field_list_parent_path =
+ record_pat.path().map(|path| AstPtr::new(&path));
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: AstPtr::new(&Either::Right(
+ record_pat,
+ )),
+ field_list_parent_path,
+ missed_fields,
+ }
+ .into(),
+ )
}
}
}
@@ -1818,17 +1941,20 @@ impl DefWithBody {
if let ast::Expr::MatchExpr(match_expr) =
&source_ptr.value.to_node(&root)
{
- if let Some(scrut_expr) = match_expr.expr() {
- acc.push(
- MissingMatchArms {
- scrutinee_expr: InFile::new(
- source_ptr.file_id,
- AstPtr::new(&scrut_expr),
- ),
- uncovered_patterns,
- }
- .into(),
- );
+ match match_expr.expr() {
+ Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
+ acc.push(
+ MissingMatchArms {
+ scrutinee_expr: InFile::new(
+ source_ptr.file_id,
+ AstPtr::new(&scrut_expr),
+ ),
+ uncovered_patterns,
+ }
+ .into(),
+ );
+ }
+ _ => {}
}
}
}
@@ -1960,6 +2086,17 @@ impl Function {
db.function_data(self.id).attrs.is_test()
}
+ /// is this a `fn main` or a function with an `export_name` of `main`?
+ pub fn is_main(self, db: &dyn HirDatabase) -> bool {
+ if !self.module(db).is_crate_root() {
+ return false;
+ }
+ let data = db.function_data(self.id);
+
+ data.name.to_smol_str() == "main"
+ || data.attrs.export_name().map(core::ops::Deref::deref) == Some("main")
+ }
+
/// Does this function have the ignore attribute?
pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).attrs.is_ignore()
@@ -2926,10 +3063,10 @@ impl Local {
.map(|&definition| {
let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
let root = src.file_syntax(db.upcast());
- src.map(|ast| match ast {
- // Suspicious unwrap
- Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
- Either::Right(it) => Either::Right(it.to_node(&root)),
+ src.map(|ast| match ast.to_node(&root) {
+ Either::Left(ast::Pat::IdentPat(it)) => Either::Left(it),
+ Either::Left(_) => unreachable!("local with non ident-pattern"),
+ Either::Right(it) => Either::Right(it),
})
})
.map(move |source| LocalSource { local: self, source })
@@ -3371,13 +3508,46 @@ impl Impl {
db.impl_data(self.id).is_negative
}
+ pub fn is_unsafe(self, db: &dyn HirDatabase) -> bool {
+ db.impl_data(self.id).is_unsafe
+ }
+
pub fn module(self, db: &dyn HirDatabase) -> Module {
self.id.lookup(db.upcast()).container.into()
}
- pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
let src = self.source(db)?;
- src.file_id.as_builtin_derive_attr_node(db.upcast())
+
+ let macro_file = src.file_id.macro_file()?;
+ let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let (derive_attr, derive_index) = match loc.kind {
+ MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
+ let module_id = self.id.lookup(db.upcast()).container;
+ (
+ db.crate_def_map(module_id.krate())[module_id.local_id]
+ .scope
+ .derive_macro_invoc(ast_id, derive_attr_index)?,
+ derive_index,
+ )
+ }
+ _ => return None,
+ };
+ let file_id = MacroFileId { macro_call_id: derive_attr };
+ let path = db
+ .parse_macro_expansion(file_id)
+ .value
+ .0
+ .syntax_node()
+ .children()
+ .nth(derive_index as usize)
+ .and_then(<ast::Attr as AstNode>::cast)
+ .and_then(|it| it.path())?;
+ Some(InMacroFile { file_id, value: path })
+ }
+
+ pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {
+ check_orphan_rules(db, self.id)
}
}
@@ -3393,10 +3563,9 @@ impl TraitRef {
resolver: &Resolver,
trait_ref: hir_ty::TraitRef,
) -> TraitRef {
- let env = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let env = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
TraitRef { env, trait_ref }
}
@@ -3536,15 +3705,14 @@ impl Type {
resolver: &Resolver,
ty: Ty,
) -> Type {
- let environment = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let environment = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
Type { env: environment, ty }
}
pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
- Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+ Type { env: TraitEnvironment::empty(krate), ty }
}
pub fn reference(inner: &Type, m: Mutability) -> Type {
@@ -3560,10 +3728,9 @@ impl Type {
fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
let resolver = lexical_env.resolver(db.upcast());
- let environment = resolver.generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(resolver.krate())),
- |d| db.trait_environment(d),
- );
+ let environment = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
Type { env: environment, ty }
}
@@ -4133,10 +4300,10 @@ impl Type {
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
let krate = scope.krate();
- let environment = scope.resolver().generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(krate.id)),
- |d| db.trait_environment(d),
- );
+ let environment = scope
+ .resolver()
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
method_resolution::iterate_method_candidates_dyn(
&canonical,
@@ -4190,10 +4357,10 @@ impl Type {
let canonical = hir_ty::replace_errors_with_variables(&self.ty);
let krate = scope.krate();
- let environment = scope.resolver().generic_def().map_or_else(
- || Arc::new(TraitEnvironment::empty(krate.id)),
- |d| db.trait_environment(d),
- );
+ let environment = scope
+ .resolver()
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
method_resolution::iterate_path_candidates(
&canonical,
@@ -4515,15 +4682,31 @@ impl Layout {
Some(self.0.largest_niche?.available(&*self.1))
}
- pub fn field_offset(&self, idx: usize) -> Option<u64> {
+ pub fn field_offset(&self, field: Field) -> Option<u64> {
match self.0.fields {
layout::FieldsShape::Primitive => None,
layout::FieldsShape::Union(_) => Some(0),
layout::FieldsShape::Array { stride, count } => {
- let i = u64::try_from(idx).ok()?;
+ let i = u64::try_from(field.index()).ok()?;
(i < count).then_some((stride * i).bytes())
}
- layout::FieldsShape::Arbitrary { ref offsets, .. } => Some(offsets.get(idx)?.bytes()),
+ layout::FieldsShape::Arbitrary { ref offsets, .. } => {
+ Some(offsets.get(RustcFieldIdx(field.id))?.bytes())
+ }
+ }
+ }
+
+ pub fn tuple_field_offset(&self, field: usize) -> Option<u64> {
+ match self.0.fields {
+ layout::FieldsShape::Primitive => None,
+ layout::FieldsShape::Union(_) => Some(0),
+ layout::FieldsShape::Array { stride, count } => {
+ let i = u64::try_from(field).ok()?;
+ (i < count).then_some((stride * i).bytes())
+ }
+ layout::FieldsShape::Arbitrary { ref offsets, .. } => {
+ Some(offsets.get(RustcFieldIdx::new(field))?.bytes())
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index a42e0978b..a03ff2207 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -2,7 +2,11 @@
mod source_to_def;
-use std::{cell::RefCell, fmt, iter, mem, ops};
+use std::{
+ cell::RefCell,
+ fmt, iter, mem,
+ ops::{self, ControlFlow, Not},
+};
use base_db::{FileId, FileRange};
use either::Either;
@@ -13,16 +17,21 @@ use hir_def::{
nameres::MacroSubNs,
resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability,
- AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
+ AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
+};
+use hir_expand::{
+ attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo,
+ InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
-use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
+use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
- ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
- match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
+ match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+ TextRange, TextSize,
};
use crate::{
@@ -35,7 +44,13 @@ use crate::{
TypeAlias, TypeParam, VariantDef,
};
-#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum DescendPreference {
+ SameText,
+ SameKind,
+ None,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
/// An item
Def(ModuleDef),
@@ -114,11 +129,12 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
- expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
- // Rootnode to HirFileId cache
+ /// Rootnode to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
- // MacroCall to its expansion's HirFileId cache
- macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+ // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
+ expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
+ /// MacroCall to its expansion's MacroFileId cache
+ macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
@@ -182,20 +198,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
}
- pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
- self.imp.resolve_method_call(call).map(Function::from)
- }
-
- /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
- pub fn resolve_method_call_field_fallback(
- &self,
- call: &ast::MethodCallExpr,
- ) -> Option<Either<Function, Field>> {
- self.imp
- .resolve_method_call_fallback(call)
- .map(|it| it.map_left(Function::from).map_right(Field::from))
- }
-
pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
self.imp.resolve_await_to_poll(await_expr).map(Function::from)
}
@@ -255,7 +257,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
- let node = self.parse_or_expand(file_id);
+ let node = self.parse_or_expand(file_id.into());
Some(node)
}
@@ -388,11 +390,72 @@ impl<'db> SemanticsImpl<'db> {
)
}
+ pub fn as_format_args_parts(
+ &self,
+ string: &ast::String,
+ ) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
+ if let Some(quote) = string.open_quote_text_range() {
+ return self
+ .descend_into_macros(DescendPreference::SameText, string.syntax().clone())
+ .into_iter()
+ .find_map(|token| {
+ let string = ast::String::cast(token)?;
+ let literal =
+ string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
+ let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
+ let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
+ let format_args = self.wrap_node_infile(format_args);
+ let res = source_analyzer
+ .as_format_args_parts(self.db, format_args.as_ref())?
+ .map(|(range, res)| (range + quote.end(), res))
+ .collect();
+ Some(res)
+ });
+ }
+ None
+ }
+
+ pub fn check_for_format_args_template(
+ &self,
+ original_token: SyntaxToken,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ if let Some(original_string) = ast::String::cast(original_token.clone()) {
+ if let Some(quote) = original_string.open_quote_text_range() {
+ return self
+ .descend_into_macros(DescendPreference::SameText, original_token.clone())
+ .into_iter()
+ .find_map(|token| {
+ self.resolve_offset_in_format_args(
+ ast::String::cast(token)?,
+ offset - quote.end(),
+ )
+ })
+ .map(|(range, res)| (range + quote.end(), res));
+ }
+ }
+ None
+ }
+
+ fn resolve_offset_in_format_args(
+ &self,
+ string: ast::String,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ debug_assert!(offset <= string.syntax().text_range().len());
+ let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
+ let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
+ let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
+ let format_args = self.wrap_node_infile(format_args);
+ source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
+ }
+
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now
let mut res = smallvec![];
let tokens = (|| {
+ // FIXME: the trivia skipping should not be necessary
let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
Some((first, last))
@@ -403,24 +466,28 @@ impl<'db> SemanticsImpl<'db> {
};
if first == last {
- self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
- if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ // node is just the token, so descend the token
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value
+ .parent_ancestors()
+ .take_while(|it| it.text_range() == value.text_range())
+ .find_map(N::cast)
+ {
res.push(node)
}
- false
+ ControlFlow::Continue(())
});
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
- self.descend_into_macros_impl(first, 0.into(), &mut |token| {
+ self.descend_into_macros_impl(first, &mut |token| {
scratch.push(token);
- false
+ ControlFlow::Continue(())
});
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
last,
- 0.into(),
&mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@@ -437,7 +504,7 @@ impl<'db> SemanticsImpl<'db> {
}
}
}
- false
+ ControlFlow::Continue(())
},
);
}
@@ -449,32 +516,42 @@ impl<'db> SemanticsImpl<'db> {
/// be considered for the mapping in case of inline format args.
pub fn descend_into_macros(
&self,
+ mode: DescendPreference,
token: SyntaxToken,
- offset: TextSize,
- ) -> SmallVec<[SyntaxToken; 1]> {
- let mut res = smallvec![];
- self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
- res.push(value);
- false
- });
- res
- }
-
- /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
- ///
- /// Returns the original non descended token if none of the mapped counterparts have the same text.
- pub fn descend_into_macros_with_same_text(
- &self,
- token: SyntaxToken,
- offset: TextSize,
) -> SmallVec<[SyntaxToken; 1]> {
- let text = token.text();
+ enum Dp<'t> {
+ SameText(&'t str),
+ SameKind(SyntaxKind),
+ None,
+ }
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let mode = match mode {
+ DescendPreference::SameText => Dp::SameText(token.text()),
+ DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
+ DescendPreference::None => Dp::None,
+ };
let mut res = smallvec![];
- self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
- if value.text() == text {
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ let is_a_match = match mode {
+ Dp::SameText(text) => value.text() == text,
+ Dp::SameKind(preferred_kind) => {
+ let kind = fetch_kind(&value);
+ kind == preferred_kind
+ // special case for derive macros
+ || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
+ }
+ Dp::None => true,
+ };
+ if is_a_match {
res.push(value);
}
- false
+ ControlFlow::Continue(())
});
if res.is_empty() {
res.push(token);
@@ -482,44 +559,46 @@ impl<'db> SemanticsImpl<'db> {
res
}
- pub fn descend_into_macros_with_kind_preference(
+ pub fn descend_into_macros_single(
&self,
+ mode: DescendPreference,
token: SyntaxToken,
- offset: TextSize,
) -> SyntaxToken {
+ enum Dp<'t> {
+ SameText(&'t str),
+ SameKind(SyntaxKind),
+ None,
+ }
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
- kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
- node.parent().map_or(kind, |it| it.kind())
- }
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
_ => token.kind(),
},
None => token.kind(),
};
- let preferred_kind = fetch_kind(&token);
- let mut res = None;
- self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
- if fetch_kind(&value) == preferred_kind {
- res = Some(value);
- true
- } else {
- if let None = res {
- res = Some(value)
- }
- false
- }
- });
- res.unwrap_or(token)
- }
-
- /// Descend the token into its macro call if it is part of one, returning the token in the
- /// expansion that it is associated with. If `offset` points into the token's range, it will
- /// be considered for the mapping in case of inline format args.
- pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
+ let mode = match mode {
+ DescendPreference::SameText => Dp::SameText(token.text()),
+ DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
+ DescendPreference::None => Dp::None,
+ };
let mut res = token.clone();
- self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ let is_a_match = match mode {
+ Dp::SameText(text) => value.text() == text,
+ Dp::SameKind(preferred_kind) => {
+ let kind = fetch_kind(&value);
+ kind == preferred_kind
+ // special case for derive macros
+ || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
+ }
+ Dp::None => true,
+ };
res = value;
- true
+ if is_a_match {
+ ControlFlow::Break(())
+ } else {
+ ControlFlow::Continue(())
+ }
});
res
}
@@ -527,177 +606,204 @@ impl<'db> SemanticsImpl<'db> {
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
- // FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
- // mapping, specifically for node downmapping
- offset: TextSize,
- f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
let _p = profile::span("descend_into_macros");
- let relative_token_offset = token.text_range().start().checked_sub(offset);
- let parent = match token.parent() {
+ let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(it) => it,
None => return,
};
- let sa = match self.analyze_no_infer(&parent) {
- Some(it) => it,
- None => return,
+
+ let span = match sa.file_id.file_id() {
+ Some(file_id) => self.db.real_span_map(file_id).span_for_range(token.text_range()),
+ None => {
+ stdx::never!();
+ return;
+ }
};
- let def_map = sa.resolver.def_map();
- let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
+ let def_map = sa.resolver.def_map();
- let mut process_expansion_for_token =
- |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
- let expansion_info = cache
- .entry(macro_file)
- .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
- .as_ref()?;
+ let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
- {
- let InFile { file_id, value } = expansion_info.expanded();
- self.cache(value, file_id);
- }
+ {
+ let InMacroFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id.into());
+ }
- let mapped_tokens = expansion_info.map_token_down(
- self.db.upcast(),
- item,
- token,
- relative_token_offset,
- )?;
- let len = stack.len();
-
- // requeue the tokens we got from mapping our current token down
- stack.extend(mapped_tokens);
- // if the length changed we have found a mapping for the token
- (stack.len() != len).then_some(())
- };
+ let InMacroFile { file_id, value: mapped_tokens } =
+ expansion_info.map_range_down(span)?;
+ let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
- // Remap the next token in the queue into a macro call its in, if it is not being remapped
- // either due to not being in a macro-call or because its unused push it into the result vec,
- // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
- while let Some(token) = stack.pop() {
- self.db.unwind_if_cancelled();
- let was_not_remapped = (|| {
- // First expand into attribute invocations
- let containing_attribute_macro_call = self.with_ctx(|ctx| {
- token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
- if item.attrs().next().is_none() {
- // Don't force populate the dyn cache for items that don't have an attribute anyways
- return None;
- }
- Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
- })
- });
- if let Some((call_id, item)) = containing_attribute_macro_call {
- let file_id = call_id.as_file();
- return process_expansion_for_token(
- &mut stack,
- file_id,
- Some(item),
- token.as_ref(),
- );
- }
+ // if the length changed we have found a mapping for the token
+ let res = mapped_tokens.is_empty().not().then_some(());
+ // requeue the tokens we got from mapping our current token down
+ stack.push((HirFileId::from(file_id), mapped_tokens));
+ res
+ };
- // Then check for token trees, that means we are either in a function-like macro or
- // secondary attribute inputs
- let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
- let parent = tt.syntax().parent()?;
+ let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(sa.file_id, smallvec![token])];
+
+ while let Some((file_id, mut tokens)) = stack.pop() {
+ while let Some(token) = tokens.pop() {
+ let was_not_remapped = (|| {
+ // First expand into attribute invocations
+ let containing_attribute_macro_call = self.with_ctx(|ctx| {
+ token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ if item.attrs().next().is_none() {
+ // Don't force populate the dyn cache for items that don't have an attribute anyways
+ return None;
+ }
+ Some((
+ ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?,
+ item,
+ ))
+ })
+ });
+ if let Some((call_id, item)) = containing_attribute_macro_call {
+ let file_id = call_id.as_macro_file();
+ let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
+ hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
+ invoc_attr_index.ast_index()
+ }
+ _ => 0,
+ };
+ // FIXME: here, the attribute's text range is used to strip away all
+ // entries from the start of the attribute "list" up to the invoking
+ // attribute. But in
+ // ```
+ // mod foo {
+ // #![inner]
+ // }
+ // ```
+ // we don't wanna strip away stuff in the `mod foo {` range, that is
+ // here if the id corresponds to an inner attribute we need to strip all
+ // text ranges of the outer ones, and then all of the inner ones up
+ // to the invoking attribute so that the in-between is ignored.
+ let text_range = item.syntax().text_range();
+ let start = collect_attrs(&item)
+ .nth(attr_id)
+ .map(|attr| match attr.1 {
+ Either::Left(it) => it.syntax().text_range().start(),
+ Either::Right(it) => it.syntax().text_range().start(),
+ })
+ .unwrap_or_else(|| text_range.start());
+ let text_range = TextRange::new(start, text_range.end());
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ return process_expansion_for_token(&mut stack, file_id);
+ }
- if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
- return None;
- }
- if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
- return None;
- }
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
- if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
- let mcall = token.with_value(macro_call);
- let file_id = match mcache.get(&mcall) {
- Some(&it) => it,
- None => {
- let it = sa.expand(self.db, mcall.as_ref())?;
- mcache.insert(mcall, it);
- it
- }
- };
- process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
- } else if let Some(meta) = ast::Meta::cast(parent) {
- // attribute we failed expansion for earlier, this might be a derive invocation
- // or derive helper attribute
- let attr = meta.parent_attr()?;
-
- let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
- // this might be a derive, or a derive helper on an ADT
- let derive_call = self.with_ctx(|ctx| {
- // so try downmapping the token into the pseudo derive expansion
- // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
- ctx.attr_to_derive_macro_call(
- token.with_value(&adt),
- token.with_value(attr.clone()),
- )
- .map(|(_, call_id, _)| call_id)
- });
-
- match derive_call {
- Some(call_id) => {
- // resolved to a derive
- let file_id = call_id.as_file();
- return process_expansion_for_token(
- &mut stack,
- file_id,
- Some(adt.into()),
- token.as_ref(),
- );
+ if tt.left_delimiter_token().map_or(false, |it| it == token) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
+ InFile::new(file_id, macro_call);
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
}
- None => Some(adt),
- }
- } else {
- // Otherwise this could be a derive helper on a variant or field
- if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ };
+ let text_range = tt.syntax().text_range();
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ process_expansion_for_token(&mut stack, file_id)
+ } else if let Some(meta) = ast::Meta::cast(parent) {
+ // attribute we failed expansion for earlier, this might be a derive invocation
+ // or derive helper attribute
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast)
{
- field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
- } else if let Some(field) =
- attr.syntax().parent().and_then(ast::TupleField::cast)
- {
- field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
- } else if let Some(variant) =
- attr.syntax().parent().and_then(ast::Variant::cast)
- {
- variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ctx.attr_to_derive_macro_call(
+ InFile::new(file_id, &adt),
+ InFile::new(file_id, attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
+ let file_id = call_id.as_macro_file();
+ let text_range = attr.syntax().text_range();
+ // remove any other token in this macro input, all their mappings are the
+ // same as this one
+ tokens.retain(|t| !text_range.contains_range(t.text_range()));
+ return process_expansion_for_token(&mut stack, file_id);
+ }
+ None => Some(adt),
+ }
} else {
- None
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) =
+ attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
+ return None;
}
- }?;
- if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
- return None;
- }
- // Not an attribute, nor a derive, so it's either a builtin or a derive helper
- // Try to resolve to a derive helper and downmap
- let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
- let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
- let helpers =
- def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
- let item = Some(adt.into());
- let mut res = None;
- for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
- res = res.or(process_expansion_for_token(
- &mut stack,
- derive.as_file(),
- item.clone(),
- token.as_ref(),
- ));
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name =
+ attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(file_id).ast_id(&adt);
+ let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
+ let mut res = None;
+ for (.., derive) in
+ helpers.iter().filter(|(helper, ..)| *helper == attr_name)
+ {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_macro_file(),
+ ));
+ }
+ res
+ } else {
+ None
}
- res
- } else {
- None
- }
- })()
- .is_none();
+ })()
+ .is_none();
- if was_not_remapped && f(token) {
- break;
+ if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
+ break;
+ }
}
}
}
@@ -712,7 +818,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset)
- .map(move |token| self.descend_into_macros(token, offset))
+ .map(move |token| self.descend_into_macros(DescendPreference::None, token))
.map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
})
@@ -737,14 +843,16 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
let node = self.find_file(node);
node.original_file_range_opt(self.db.upcast())
+ .filter(|(_, ctx)| ctx.is_root())
+ .map(TupleExt::head)
}
/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
- |InFile { file_id, value }| {
- self.cache(find_root(value.syntax()), file_id);
+ |InRealFile { file_id, value }| {
+ self.cache(find_root(value.syntax()), file_id.into());
value
},
)
@@ -755,8 +863,8 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
- |InFile { file_id, value }| {
- self.cache(find_root(&value), file_id);
+ |InRealFile { file_id, value }| {
+ self.cache(find_root(&value), file_id.into());
value
},
)
@@ -787,7 +895,7 @@ impl<'db> SemanticsImpl<'db> {
Some(parent) => Some(InFile::new(file_id, parent)),
None => {
self.cache(value.clone(), file_id);
- file_id.call_node(db)
+ Some(file_id.macro_file()?.call_node(db))
}
}
})
@@ -840,10 +948,10 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
let analyze = self.analyze(ty.syntax())?;
let ctx = LowerCtx::with_file_id(self.db.upcast(), analyze.file_id);
- let ty = hir_ty::TyLoweringContext::new(
+ let ty = hir_ty::TyLoweringContext::new_maybe_unowned(
self.db,
&analyze.resolver,
- analyze.resolver.module().into(),
+ analyze.resolver.type_owner(),
)
.lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
@@ -851,9 +959,9 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
let analyze = self.analyze(path.syntax())?;
- let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
- let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
- let hir_path = Path::from_src(path.clone(), &ctx)?;
+ let span_map = self.db.span_map(analyze.file_id);
+ let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
+ let hir_path = Path::from_src(&ctx, path.clone())?;
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
TypeNs::TraitId(id) => Some(Trait { id }),
_ => None,
@@ -937,14 +1045,15 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
}
- fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+ pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.analyze(call.syntax())?.resolve_method_call(self.db, call)
}
- fn resolve_method_call_fallback(
+ /// Attempts to resolve this call expression as a method call falling back to resolving it as a field.
+ pub fn resolve_method_call_fallback(
&self,
call: &ast::MethodCallExpr,
- ) -> Option<Either<FunctionId, FieldId>> {
+ ) -> Option<Either<Function, Field>> {
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
}
@@ -976,6 +1085,13 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(field.syntax())?.resolve_field(self.db, field)
}
+ pub fn resolve_field_fallback(
+ &self,
+ field: &ast::FieldExpr,
+ ) -> Option<Either<Field, Function>> {
+ self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
+ }
+
pub fn resolve_record_field(
&self,
field: &ast::RecordExprField,
@@ -1037,7 +1153,7 @@ impl<'db> SemanticsImpl<'db> {
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut();
- let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache };
+ let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
f(&mut ctx)
}
@@ -1187,7 +1303,7 @@ impl<'db> SemanticsImpl<'db> {
return None;
}
- let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+ let func = self.resolve_method_call(method_call_expr)?;
let res = match func.self_param(self.db)?.access(self.db) {
Access::Shared | Access::Exclusive => true,
Access::Owned => false,
@@ -1451,7 +1567,7 @@ impl SemanticsScope<'_> {
/// necessary a heuristic, as it doesn't take hygiene into account.
pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
- let path = Path::from_src(path.clone(), &ctx)?;
+ let path = Path::from_src(&ctx, path.clone())?;
resolve_hir_path(self.db, &self.resolver, &path)
}
@@ -1478,6 +1594,10 @@ impl SemanticsScope<'_> {
pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
self.resolver.extern_crate_decls_in_scope(self.db.upcast())
}
+
+ pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {
+ self.resolver.impl_def() == other.resolver.impl_def()
+ }
}
#[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index aabda3655..df8c1e904 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -97,7 +97,7 @@ use hir_def::{
FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
-use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
+use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use stdx::{impl_from, never};
@@ -112,7 +112,7 @@ pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap
pub(super) struct SourceToDefCtx<'a, 'b> {
pub(super) db: &'b dyn HirDatabase,
- pub(super) cache: &'a mut SourceToDefCache,
+ pub(super) dynmap_cache: &'a mut SourceToDefCache,
}
impl SourceToDefCtx<'_, '_> {
@@ -300,7 +300,7 @@ impl SourceToDefCtx<'_, '_> {
fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
let db = self.db;
- self.cache
+ self.dynmap_cache
.entry((container, file_id))
.or_insert_with(|| container.child_by_source(db, file_id))
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index f29fb1edf..d05118bbc 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -26,11 +26,10 @@ use hir_def::{
};
use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander,
- hygiene::Hygiene,
mod_path::path,
name,
name::{AsName, Name},
- HirFileId, InFile,
+ HirFileId, InFile, MacroFileId, MacroFileIdExt,
};
use hir_ty::{
diagnostics::{
@@ -236,9 +235,9 @@ impl SourceAnalyzer {
_db: &dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<BindingMode> {
- let binding_id = self.binding_id_of_pat(pat)?;
+ let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer.as_ref()?;
- infer.binding_modes.get(binding_id).map(|bm| match bm {
+ infer.binding_modes.get(id).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@@ -281,25 +280,49 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<FunctionId> {
+ ) -> Option<Function> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
- Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
+ Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into())
}
pub(crate) fn resolve_method_call_fallback(
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
- ) -> Option<Either<FunctionId, FieldId>> {
+ ) -> Option<Either<Function, Field>> {
let expr_id = self.expr_id(db, &call.clone().into())?;
let inference_result = self.infer.as_ref()?;
match inference_result.method_resolution(expr_id) {
- Some((f_in_trait, substs)) => {
- Some(Either::Left(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs)))
- }
- None => inference_result.field_resolution(expr_id).map(Either::Right),
+ Some((f_in_trait, substs)) => Some(Either::Left(
+ self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(),
+ )),
+ None => inference_result.field_resolution(expr_id).map(Into::into).map(Either::Right),
+ }
+ }
+
+ pub(crate) fn resolve_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Field> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_field_fallback(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Either<Field, Function>> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ let inference_result = self.infer.as_ref()?;
+ match inference_result.field_resolution(expr_id) {
+ Some(field) => Some(Either::Left(field.into())),
+ None => inference_result.method_resolution(expr_id).map(|(f, substs)| {
+ Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into())
+ }),
}
}
@@ -418,15 +441,6 @@ impl SourceAnalyzer {
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
- pub(crate) fn resolve_field(
- &self,
- db: &dyn HirDatabase,
- field: &ast::FieldExpr,
- ) -> Option<Field> {
- let expr_id = self.expr_id(db, &field.clone().into())?;
- self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
- }
-
pub(crate) fn resolve_record_field(
&self,
db: &dyn HirDatabase,
@@ -484,7 +498,7 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>,
) -> Option<Macro> {
let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id);
- let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+ let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
self.resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
.map(|(it, _)| it.into())
@@ -596,9 +610,8 @@ impl SourceAnalyzer {
}
// This must be a normal source file rather than macro file.
- let hygiene = Hygiene::new(db.upcast(), self.file_id);
- let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene);
- let hir_path = Path::from_src(path.clone(), &ctx)?;
+ let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id));
+ let hir_path = Path::from_src(&ctx, path.clone())?;
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.
@@ -755,14 +768,15 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
- ) -> Option<HirFileId> {
+ ) -> Option<MacroFileId> {
let krate = self.resolver.krate();
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
})?;
- Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+ // FIXME: why is the macro expansion level capped at exactly 64? Document or name the limit.
+ Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
}
pub(crate) fn resolve_variant(
@@ -821,6 +835,52 @@ impl SourceAnalyzer {
false
}
+ pub(crate) fn resolve_offset_in_format_args(
+ &self,
+ db: &dyn HirDatabase,
+ format_args: InFile<&ast::FormatArgsExpr>,
+ offset: TextSize,
+ ) -> Option<(TextRange, Option<PathResolution>)> {
+ let implicits = self.body_source_map()?.implicit_format_args(format_args)?;
+ implicits.iter().find(|(range, _)| range.contains_inclusive(offset)).map(|(range, name)| {
+ (
+ *range,
+ resolve_hir_value_path(
+ db,
+ &self.resolver,
+ self.resolver.body_owner(),
+ &Path::from_known_path_with_no_generic(ModPath::from_segments(
+ PathKind::Plain,
+ Some(name.clone()),
+ )),
+ ),
+ )
+ })
+ }
+
+ pub(crate) fn as_format_args_parts<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ format_args: InFile<&ast::FormatArgsExpr>,
+ ) -> Option<impl Iterator<Item = (TextRange, Option<PathResolution>)> + 'a> {
+ Some(self.body_source_map()?.implicit_format_args(format_args)?.iter().map(
+ move |(range, name)| {
+ (
+ *range,
+ resolve_hir_value_path(
+ db,
+ &self.resolver,
+ self.resolver.body_owner(),
+ &Path::from_known_path_with_no_generic(ModPath::from_segments(
+ PathKind::Plain,
+ Some(name.clone()),
+ )),
+ ),
+ )
+ },
+ ))
+ }
+
fn resolve_impl_method_or_trait_def(
&self,
db: &dyn HirDatabase,
@@ -888,17 +948,18 @@ fn scope_for_offset(
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
- let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ let InFile { file_id, value } = source_map.expr_syntax(id).ok()?;
if from_file == file_id {
return Some((value.text_range(), scope));
}
// FIXME handle attribute expansion
- let source = iter::successors(file_id.call_node(db.upcast()), |it| {
- it.file_id.call_node(db.upcast())
- })
- .find(|it| it.file_id == from_file)
- .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ let source =
+ iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| {
+ Some(it.file_id.macro_file()?.call_node(db.upcast()))
+ })
+ .find(|it| it.file_id == from_file)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
Some((source.value.text_range(), scope))
})
.filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
@@ -923,7 +984,7 @@ fn adjust(
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
- let source = source_map.expr_syntax(*id).ok()?;
+ let source = source_map.expr_syntax(id).ok()?;
// FIXME: correctly handle macro expansion
if source.file_id != from_file {
return None;
@@ -979,8 +1040,9 @@ fn resolve_hir_path_(
let types = || {
let (ty, unresolved) = match path.type_anchor() {
Some(type_ref) => {
- let (_, res) = TyLoweringContext::new(db, resolver, resolver.module().into())
- .lower_ty_ext(type_ref);
+ let (_, res) =
+ TyLoweringContext::new_maybe_unowned(db, resolver, resolver.type_owner())
+ .lower_ty_ext(type_ref);
res.map(|ty_ns| (ty_ns, path.segments().first()))
}
None => {
@@ -1039,24 +1101,7 @@ fn resolve_hir_path_(
};
let body_owner = resolver.body_owner();
- let values = || {
- resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
- let res = match val {
- ValueNs::LocalBinding(binding_id) => {
- let var = Local { parent: body_owner?, binding_id };
- PathResolution::Local(var)
- }
- ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
- ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
- ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
- ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
- ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
- ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
- ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
- };
- Some(res)
- })
- };
+ let values = || resolve_hir_value_path(db, resolver, body_owner, path);
let items = || {
resolver
@@ -1076,6 +1121,30 @@ fn resolve_hir_path_(
.or_else(macros)
}
+fn resolve_hir_value_path(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ body_owner: Option<DefWithBodyId>,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
+ let res = match val {
+ ValueNs::LocalBinding(binding_id) => {
+ let var = Local { parent: body_owner?, binding_id };
+ PathResolution::Local(var)
+ }
+ ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+ ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+ ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+ ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+ ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
+ ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
+ };
+ Some(res)
+ })
+}
+
/// Resolves a path where we know it is a qualifier of another path.
///
/// For example, if we have:
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index ca7874c36..a2a30edeb 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -9,7 +9,7 @@ use hir_def::{
};
use hir_expand::{HirFileId, InFile};
use hir_ty::db::HirDatabase;
-use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
+use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr};
use crate::{Module, ModuleDef, Semantics};
@@ -23,6 +23,7 @@ pub struct FileSymbol {
pub loc: DeclarationLocation,
pub container_name: Option<SmolStr>,
pub is_alias: bool,
+ pub is_assoc: bool,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -32,7 +33,7 @@ pub struct DeclarationLocation {
/// This points to the whole syntax node of the declaration.
pub ptr: SyntaxNodePtr,
/// This points to the [`syntax::ast::Name`] identifier of the declaration.
- pub name_ptr: SyntaxNodePtr,
+ pub name_ptr: AstPtr<syntax::ast::Name>,
}
impl DeclarationLocation {
@@ -49,15 +50,6 @@ impl DeclarationLocation {
let node = resolve_node(db, self.hir_file_id, &self.ptr);
node.as_ref().original_file_range(db.upcast())
}
-
- pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
- if let Some(file_id) = self.hir_file_id.file_id() {
- // fast path to prevent parsing
- return Some(FileRange { file_id, range: self.name_ptr.text_range() });
- }
- let node = resolve_node(db, self.hir_file_id, &self.name_ptr);
- node.as_ref().original_file_range_opt(db.upcast())
- }
}
fn resolve_node(
@@ -130,34 +122,34 @@ impl<'a> SymbolCollector<'a> {
match module_def_id {
ModuleDefId::ModuleId(id) => self.push_module(id),
ModuleDefId::FunctionId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
- ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id),
- ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id),
- ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id),
+ ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
ModuleDefId::ConstId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
ModuleDefId::StaticId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_body(id);
}
ModuleDefId::TraitId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
self.collect_from_trait(id);
}
ModuleDefId::TraitAliasId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
}
ModuleDefId::TypeAliasId(id) => {
- self.push_decl(id);
+ self.push_decl(id, false);
}
ModuleDefId::MacroId(id) => match id {
- MacroId::Macro2Id(id) => self.push_decl(id),
- MacroId::MacroRulesId(id) => self.push_decl(id),
- MacroId::ProcMacroId(id) => self.push_decl(id),
+ MacroId::Macro2Id(id) => self.push_decl(id, false),
+ MacroId::MacroRulesId(id) => self.push_decl(id, false),
+ MacroId::ProcMacroId(id) => self.push_decl(id, false),
},
// Don't index these.
ModuleDefId::BuiltinType(_) => {}
@@ -190,7 +182,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
- name_ptr: SyntaxNodePtr::new(name.syntax()),
+ name_ptr: AstPtr::new(&name),
};
self.symbols.push(FileSymbol {
@@ -199,6 +191,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc: false,
});
});
}
@@ -211,9 +204,9 @@ impl<'a> SymbolCollector<'a> {
for &id in id {
if id.module(self.db.upcast()) == module_id {
match id {
- MacroId::Macro2Id(id) => self.push_decl(id),
- MacroId::MacroRulesId(id) => self.push_decl(id),
- MacroId::ProcMacroId(id) => self.push_decl(id),
+ MacroId::Macro2Id(id) => self.push_decl(id, false),
+ MacroId::MacroRulesId(id) => self.push_decl(id, false),
+ MacroId::ProcMacroId(id) => self.push_decl(id, false),
}
}
}
@@ -275,13 +268,13 @@ impl<'a> SymbolCollector<'a> {
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
match assoc_item_id {
- AssocItemId::FunctionId(id) => self.push_decl(id),
- AssocItemId::ConstId(id) => self.push_decl(id),
- AssocItemId::TypeAliasId(id) => self.push_decl(id),
+ AssocItemId::FunctionId(id) => self.push_decl(id, true),
+ AssocItemId::ConstId(id) => self.push_decl(id, true),
+ AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
}
}
- fn push_decl<L>(&mut self, id: L)
+ fn push_decl<L>(&mut self, id: L, is_assoc: bool)
where
L: Lookup + Into<ModuleDefId>,
<L as Lookup>::Data: HasSource,
@@ -294,7 +287,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ name_ptr: AstPtr::new(&name_node),
};
if let Some(attrs) = def.attrs(self.db) {
@@ -305,6 +298,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
is_alias: true,
+ is_assoc,
});
}
}
@@ -315,6 +309,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc,
});
}
@@ -327,7 +322,7 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: declaration.file_id,
ptr: SyntaxNodePtr::new(module.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ name_ptr: AstPtr::new(&name_node),
};
let def = ModuleDef::Module(module_id.into());
@@ -340,6 +335,7 @@ impl<'a> SymbolCollector<'a> {
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
is_alias: true,
+ is_assoc: false,
});
}
}
@@ -350,6 +346,7 @@ impl<'a> SymbolCollector<'a> {
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
+ is_assoc: false,
});
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
index 447e38f91..a622ec1a9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
@@ -14,8 +14,8 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.5"
-either = "1.7.0"
+itertools.workspace = true
+either.workspace = true
smallvec.workspace = true
# local deps
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
index b273ebc85..fbe17dbfd 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_config.rs
@@ -14,5 +14,6 @@ pub struct AssistConfig {
pub allowed: Option<Vec<AssistKind>>,
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,
+ pub prefer_prelude: bool,
pub assist_emit_must_use: bool,
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index c0e5429a2..410c62310 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -2249,4 +2249,35 @@ impl b::LocalTrait for B {
"#,
)
}
+
+ #[test]
+ fn doc_hidden_nondefault_member() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+//- /lib.rs crate:b new_source_root:local
+trait LocalTrait {
+ #[doc(hidden)]
+ fn no_skip_non_default() -> Option<()>;
+
+ #[doc(hidden)]
+ fn skip_default() -> Option<()> {
+ todo!()
+ }
+}
+
+//- /main.rs crate:a deps:b
+struct B;
+impl b::Loc$0alTrait for B {}
+ "#,
+ r#"
+struct B;
+impl b::LocalTrait for B {
+ fn no_skip_non_default() -> Option<()> {
+ ${0:todo!()}
+ }
+}
+ "#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 3b162d7c4..2374da9a3 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -88,7 +88,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.into_iter()
.filter_map(|variant| {
Some((
- build_pat(ctx.db(), module, variant, ctx.config.prefer_no_std)?,
+ build_pat(
+ ctx.db(),
+ module,
+ variant,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )?,
variant.should_be_hidden(ctx.db(), module.krate()),
))
})
@@ -140,7 +146,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| {
- build_pat(ctx.db(), module, variant, ctx.config.prefer_no_std)
+ build_pat(
+ ctx.db(),
+ module,
+ variant,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
});
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
@@ -173,7 +185,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants.into_iter().filter_map(|variant| {
- build_pat(ctx.db(), module, variant.clone(), ctx.config.prefer_no_std)
+ build_pat(
+ ctx.db(),
+ module,
+ variant.clone(),
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
});
(ast::Pat::from(make::slice_pat(patterns)), is_hidden)
})
@@ -273,9 +291,10 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
syntax::SyntaxElement::Token(it) => {
// Don't have a way to make tokens mut, so instead make the parent mut
// and find the token again
- let parent = edit.make_syntax_mut(it.parent().unwrap());
+ let parent =
+ edit.make_syntax_mut(it.parent().expect("Token must have a parent."));
let mut_token =
- parent.covering_element(it.text_range()).into_token().unwrap();
+ parent.covering_element(it.text_range()).into_token().expect("Covering element cannot be found. Range may be beyond the current node's range");
syntax::SyntaxElement::from(mut_token)
}
@@ -439,28 +458,35 @@ fn build_pat(
module: hir::Module,
var: ExtendedVariant,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ast::Pat> {
match var {
ExtendedVariant::Variant(var) => {
- let path =
- mod_path_to_ast(&module.find_use_path(db, ModuleDef::from(var), prefer_no_std)?);
+ let path = mod_path_to_ast(&module.find_use_path(
+ db,
+ ModuleDef::from(var),
+ prefer_no_std,
+ prefer_prelude,
+ )?);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
- let pat: ast::Pat = match var.source(db)?.value.kind() {
+ Some(match var.source(db)?.value.kind() {
ast::StructKind::Tuple(field_list) => {
let pats =
iter::repeat(make::wildcard_pat().into()).take(field_list.fields().count());
make::tuple_struct_pat(path, pats).into()
}
ast::StructKind::Record(field_list) => {
- let pats = field_list
- .fields()
- .map(|f| make::ext::simple_ident_pat(f.name().unwrap()).into());
+ let pats = field_list.fields().map(|f| {
+ make::ext::simple_ident_pat(
+ f.name().expect("Record field must have a name"),
+ )
+ .into()
+ });
make::record_pat(path, pats).into()
}
ast::StructKind::Unit => make::path_pat(path),
- };
- Some(pat)
+ })
}
ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
ExtendedVariant::False => Some(ast::Pat::from(make::literal_pat("false"))),
@@ -1941,4 +1967,35 @@ fn main() {
"#,
);
}
+
+ /// See [`discussion`](https://github.com/rust-lang/rust-analyzer/pull/15594#discussion_r1322960614)
+ #[test]
+ fn missing_field_name() {
+ check_assist(
+ add_missing_match_arms,
+ r#"
+enum A {
+ A,
+ Missing { a: u32, : u32, c: u32 }
+}
+
+fn a() {
+ let b = A::A;
+ match b$0 {}
+}"#,
+ r#"
+enum A {
+ A,
+ Missing { a: u32, : u32, c: u32 }
+}
+
+fn a() {
+ let b = A::A;
+ match b {
+ $0A::A => todo!(),
+ A::Missing { a, u32, c } => todo!(),
+ }
+}"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
index 36f68d176..88fd0b1b7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -1,6 +1,9 @@
+use either::Either;
use ide_db::defs::{Definition, NameRefClass};
-use itertools::Itertools;
-use syntax::{ast, AstNode, SyntaxKind, T};
+use syntax::{
+ ast::{self, make, HasArgList},
+ ted, AstNode,
+};
use crate::{
assist_context::{AssistContext, Assists},
@@ -25,21 +28,45 @@ use crate::{
// }
// ```
pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| {
- let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?;
- if arg_list.args().next().is_some() {
- return None;
- }
- cov_mark::hit!(add_turbo_fish_after_call);
- cov_mark::hit!(add_type_ascription_after_call);
- arg_list.l_paren_token()?.prev_token().filter(|it| it.kind() == SyntaxKind::IDENT)
- })?;
- let next_token = ident.next_token()?;
- if next_token.kind() == T![::] {
+ let turbofish_target =
+ ctx.find_node_at_offset::<ast::PathSegment>().map(Either::Left).or_else(|| {
+ let callable_expr = ctx.find_node_at_offset::<ast::CallableExpr>()?;
+
+ if callable_expr.arg_list()?.args().next().is_some() {
+ return None;
+ }
+
+ cov_mark::hit!(add_turbo_fish_after_call);
+ cov_mark::hit!(add_type_ascription_after_call);
+
+ match callable_expr {
+ ast::CallableExpr::Call(it) => {
+ let ast::Expr::PathExpr(path) = it.expr()? else {
+ return None;
+ };
+
+ Some(Either::Left(path.path()?.segment()?))
+ }
+ ast::CallableExpr::MethodCall(it) => Some(Either::Right(it)),
+ }
+ })?;
+
+ let already_has_turbofish = match &turbofish_target {
+ Either::Left(path_segment) => path_segment.generic_arg_list().is_some(),
+ Either::Right(method_call) => method_call.generic_arg_list().is_some(),
+ };
+
+ if already_has_turbofish {
cov_mark::hit!(add_turbo_fish_one_fish_is_enough);
return None;
}
- let name_ref = ast::NameRef::cast(ident.parent()?)?;
+
+ let name_ref = match &turbofish_target {
+ Either::Left(path_segment) => path_segment.name_ref()?,
+ Either::Right(method_call) => method_call.name_ref()?,
+ };
+ let ident = name_ref.ident_token()?;
+
let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
NameRefClass::Definition(def) => def,
NameRefClass::FieldShorthand { .. } | NameRefClass::ExternCrateShorthand { .. } => {
@@ -58,20 +85,27 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
if let Some(let_stmt) = ctx.find_node_at_offset::<ast::LetStmt>() {
if let_stmt.colon_token().is_none() {
- let type_pos = let_stmt.pat()?.syntax().last_token()?.text_range().end();
- let semi_pos = let_stmt.syntax().last_token()?.text_range().end();
+ if let_stmt.pat().is_none() {
+ return None;
+ }
acc.add(
AssistId("add_type_ascription", AssistKind::RefactorRewrite),
"Add `: _` before assignment operator",
ident.text_range(),
- |builder| {
+ |edit| {
+ let let_stmt = edit.make_mut(let_stmt);
+
if let_stmt.semicolon_token().is_none() {
- builder.insert(semi_pos, ";");
+ ted::append_child(let_stmt.syntax(), make::tokens::semicolon());
}
- match ctx.config.snippet_cap {
- Some(cap) => builder.insert_snippet(cap, type_pos, ": ${0:_}"),
- None => builder.insert(type_pos, ": _"),
+
+ let placeholder_ty = make::ty_placeholder().clone_for_update();
+
+ let_stmt.set_ty(Some(placeholder_ty.clone()));
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ edit.add_placeholder_snippet(cap, placeholder_ty);
}
},
)?
@@ -91,38 +125,46 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
AssistId("add_turbo_fish", AssistKind::RefactorRewrite),
"Add `::<>`",
ident.text_range(),
- |builder| {
- builder.trigger_signature_help();
- match ctx.config.snippet_cap {
- Some(cap) => {
- let fish_head = get_snippet_fish_head(number_of_arguments);
- let snip = format!("::<{fish_head}>");
- builder.insert_snippet(cap, ident.text_range().end(), snip)
+ |edit| {
+ edit.trigger_signature_help();
+
+ let new_arg_list = match turbofish_target {
+ Either::Left(path_segment) => {
+ edit.make_mut(path_segment).get_or_create_generic_arg_list()
+ }
+ Either::Right(method_call) => {
+ edit.make_mut(method_call).get_or_create_generic_arg_list()
}
- None => {
- let fish_head = std::iter::repeat("_").take(number_of_arguments).format(", ");
- let snip = format!("::<{fish_head}>");
- builder.insert(ident.text_range().end(), snip);
+ };
+
+ let fish_head = get_fish_head(number_of_arguments).clone_for_update();
+
+ // Note: we need to replace the `new_arg_list` instead of being able to use something like
+ // `GenericArgList::add_generic_arg` as `PathSegment::get_or_create_generic_arg_list`
+ // always creates a non-turbofish form generic arg list.
+ ted::replace(new_arg_list.syntax(), fish_head.syntax());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ for arg in fish_head.generic_args() {
+ edit.add_placeholder_snippet(cap, arg)
}
}
},
)
}
-/// This will create a snippet string with tabstops marked
-fn get_snippet_fish_head(number_of_arguments: usize) -> String {
- let mut fish_head = (1..number_of_arguments)
- .format_with("", |i, f| f(&format_args!("${{{i}:_}}, ")))
- .to_string();
-
- // tabstop 0 is a special case and always the last one
- fish_head.push_str("${0:_}");
- fish_head
+/// This will create a turbofish generic arg list corresponding to the number of arguments
+fn get_fish_head(number_of_arguments: usize) -> ast::GenericArgList {
+ let args = (0..number_of_arguments).map(|_| make::type_arg(make::ty_placeholder()).into());
+ make::turbofish_generic_arg_list(args)
}
#[cfg(test)]
mod tests {
- use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable};
+ use crate::tests::{
+ check_assist, check_assist_by_label, check_assist_not_applicable,
+ check_assist_not_applicable_by_label,
+ };
use super::*;
@@ -364,6 +406,20 @@ fn main() {
}
#[test]
+ fn add_type_ascription_missing_pattern() {
+ check_assist_not_applicable_by_label(
+ add_turbo_fish,
+ r#"
+fn make<T>() -> T {}
+fn main() {
+ let = make$0()
+}
+"#,
+ "Add `: _` before assignment operator",
+ );
+ }
+
+ #[test]
fn add_turbo_fish_function_lifetime_parameter() {
check_assist(
add_turbo_fish,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
index 66bc2f6da..2d41243c2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -1,7 +1,13 @@
use std::collections::VecDeque;
+use ide_db::{
+ assists::GroupLabel,
+ famous_defs::FamousDefs,
+ source_change::SourceChangeBuilder,
+ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+};
use syntax::{
- ast::{self, AstNode, Expr::BinExpr},
+ ast::{self, make, AstNode, Expr::BinExpr, HasArgList},
ted::{self, Position},
SyntaxKind,
};
@@ -89,7 +95,8 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let dm_lhs = demorganed.lhs()?;
- acc.add(
+ acc.add_group(
+ &GroupLabel("Apply De Morgan's law".to_string()),
AssistId("apply_demorgan", AssistKind::RefactorRewrite),
"Apply De Morgan's law",
op_range,
@@ -143,6 +150,127 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
)
}
+// Assist: apply_demorgan_iterator
+//
+// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law] to
+// `Iterator::all` and `Iterator::any`.
+//
+// This transforms expressions of the form `!iter.any(|x| predicate(x))` into
+// `iter.all(|x| !predicate(x))`, and conversely transforms `!iter.all(|x| predicate(x))`
+// into `iter.any(|x| !predicate(x))`.
+//
+// ```
+// # //- minicore: iterator
+// fn main() {
+// let arr = [1, 2, 3];
+// if !arr.into_iter().$0any(|num| num == 4) {
+// println!("foo");
+// }
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let arr = [1, 2, 3];
+// if arr.into_iter().all(|num| num != 4) {
+// println!("foo");
+// }
+// }
+// ```
+pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let method_call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+ let (name, arg_expr) = validate_method_call_expr(ctx, &method_call)?;
+
+ let ast::Expr::ClosureExpr(closure_expr) = arg_expr else { return None };
+ let closure_body = closure_expr.body()?;
+
+ let op_range = method_call.syntax().text_range();
+ let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str());
+ acc.add_group(
+ &GroupLabel("Apply De Morgan's law".to_string()),
+ AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite),
+ label,
+ op_range,
+ |edit| {
+ // replace the method name
+ let new_name = match name.text().as_str() {
+ "all" => make::name_ref("any"),
+ "any" => make::name_ref("all"),
+ _ => unreachable!(),
+ }
+ .clone_for_update();
+ edit.replace_ast(name, new_name);
+
+ // negate all tail expressions in the closure body
+ let tail_cb = &mut |e: &_| tail_cb_impl(edit, e);
+ walk_expr(&closure_body, &mut |expr| {
+ if let ast::Expr::ReturnExpr(ret_expr) = expr {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, tail_cb);
+ }
+ }
+ });
+ for_each_tail_expr(&closure_body, tail_cb);
+
+ // negate the whole method call
+ if let Some(prefix_expr) = method_call
+ .syntax()
+ .parent()
+ .and_then(ast::PrefixExpr::cast)
+ .filter(|prefix_expr| matches!(prefix_expr.op_kind(), Some(ast::UnaryOp::Not)))
+ {
+ edit.delete(
+ prefix_expr
+ .op_token()
+ .expect("prefix expression always has an operator")
+ .text_range(),
+ );
+ } else {
+ edit.insert(method_call.syntax().text_range().start(), "!");
+ }
+ },
+ )
+}
+
+/// Ensures that the method call is to `Iterator::all` or `Iterator::any`.
+fn validate_method_call_expr(
+ ctx: &AssistContext<'_>,
+ method_call: &ast::MethodCallExpr,
+) -> Option<(ast::NameRef, ast::Expr)> {
+ let name_ref = method_call.name_ref()?;
+ if name_ref.text() != "all" && name_ref.text() != "any" {
+ return None;
+ }
+ let arg_expr = method_call.arg_list()?.args().next()?;
+
+ let sema = &ctx.sema;
+
+ let receiver = method_call.receiver()?;
+ let it_type = sema.type_of_expr(&receiver)?.adjusted();
+ let module = sema.scope(receiver.syntax())?.module();
+ let krate = module.krate();
+
+ let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
+ it_type.impls_trait(sema.db, iter_trait, &[]).then_some((name_ref, arg_expr))
+}
+
+fn tail_cb_impl(edit: &mut SourceChangeBuilder, e: &ast::Expr) {
+ match e {
+ ast::Expr::BreakExpr(break_expr) => {
+ if let Some(break_expr_arg) = break_expr.expr() {
+ for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(edit, e))
+ }
+ }
+ ast::Expr::ReturnExpr(_) => {
+ // all return expressions have already been handled by the walk loop
+ }
+ e => {
+ let inverted_body = invert_boolean_expression(e.clone());
+ edit.replace(e.syntax().text_range(), inverted_body.syntax().text());
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -255,4 +383,206 @@ fn f() { !(S <= S || S < S) }
"fn() { let x = a && b && c; }",
)
}
+
+ #[test]
+ fn demorgan_iterator_any_all_reverse() {
+ check_assist(
+ apply_demorgan_iterator,
+ r#"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if arr.into_iter().all(|num| num $0!= 4) {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().any(|num| num == 4) {
+ println!("foo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_iterator_all_any() {
+ check_assist(
+ apply_demorgan_iterator,
+ r#"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().$0all(|num| num > 3) {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ let arr = [1, 2, 3];
+ if arr.into_iter().any(|num| num <= 3) {
+ println!("foo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_iterator_multiple_terms() {
+ check_assist(
+ apply_demorgan_iterator,
+ r#"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().$0any(|num| num > 3 && num == 23 && num <= 30) {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ let arr = [1, 2, 3];
+ if arr.into_iter().all(|num| !(num > 3 && num == 23 && num <= 30)) {
+ println!("foo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_iterator_double_negation() {
+ check_assist(
+ apply_demorgan_iterator,
+ r#"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().$0all(|num| !(num > 3)) {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ let arr = [1, 2, 3];
+ if arr.into_iter().any(|num| num > 3) {
+ println!("foo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_iterator_double_parens() {
+ check_assist(
+ apply_demorgan_iterator,
+ r#"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().$0any(|num| (num > 3 && (num == 1 || num == 2))) {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ let arr = [1, 2, 3];
+ if arr.into_iter().all(|num| !(num > 3 && (num == 1 || num == 2))) {
+ println!("foo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_iterator_multiline() {
+ check_assist(
+ apply_demorgan_iterator,
+ r#"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if arr
+ .into_iter()
+ .all$0(|num| !num.is_negative())
+ {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr
+ .into_iter()
+ .any(|num| num.is_negative())
+ {
+ println!("foo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_iterator_block_closure() {
+ check_assist(
+ apply_demorgan_iterator,
+ r#"
+//- minicore: iterator
+fn main() {
+ let arr = [-1, 1, 2, 3];
+ if arr.into_iter().all(|num: i32| {
+ $0if num.is_positive() {
+ num <= 3
+ } else {
+ num >= -1
+ }
+ }) {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+fn main() {
+ let arr = [-1, 1, 2, 3];
+ if !arr.into_iter().any(|num: i32| {
+ if num.is_positive() {
+ num > 3
+ } else {
+ num < -1
+ }
+ }) {
+ println!("foo");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn demorgan_iterator_wrong_method() {
+ check_assist_not_applicable(
+ apply_demorgan_iterator,
+ r#"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().$0map(|num| num > 3) {
+ println!("foo");
+ }
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
index 7acf2ea0a..f508c42c5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs
@@ -5,7 +5,7 @@ use ide_db::{
helpers::mod_path_to_ast,
imports::{
import_assets::{ImportAssets, ImportCandidate, LocatedImport},
- insert_use::{insert_use, ImportScope},
+ insert_use::{insert_use, insert_use_as_alias, ImportScope},
},
};
use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
@@ -93,6 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
&ctx.sema,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+            ctx.config.prefer_prelude,
);
if proposed_imports.is_empty() {
return None;
@@ -129,10 +130,12 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
for import in proposed_imports {
let import_path = import.import_path;
+ let (assist_id, import_name) =
+ (AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db()));
acc.add_group(
&group_label,
- AssistId("auto_import", AssistKind::QuickFix),
- format!("Import `{}`", import_path.display(ctx.db())),
+ assist_id,
+ format!("Import `{}`", import_name),
range,
|builder| {
let scope = match scope.clone() {
@@ -143,6 +146,38 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
insert_use(&scope, mod_path_to_ast(&import_path), &ctx.config.insert_use);
},
);
+
+ match import_assets.import_candidate() {
+ ImportCandidate::TraitAssocItem(name) | ImportCandidate::TraitMethod(name) => {
+ let is_method =
+ matches!(import_assets.import_candidate(), ImportCandidate::TraitMethod(_));
+ let type_ = if is_method { "method" } else { "item" };
+ let group_label = GroupLabel(format!(
+ "Import a trait for {} {} by alias",
+ type_,
+ name.assoc_item_name.text()
+ ));
+ acc.add_group(
+ &group_label,
+ assist_id,
+ format!("Import `{} as _`", import_name),
+ range,
+ |builder| {
+ let scope = match scope.clone() {
+ ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
+ };
+ insert_use_as_alias(
+ &scope,
+ mod_path_to_ast(&import_path),
+ &ctx.config.insert_use,
+ );
+ },
+ );
+ }
+ _ => {}
+ }
}
Some(())
}
@@ -253,7 +288,8 @@ mod tests {
};
use crate::tests::{
- check_assist, check_assist_not_applicable, check_assist_target, TEST_CONFIG,
+ check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target,
+ TEST_CONFIG,
};
fn check_auto_import_order(before: &str, order: &[&str]) {
@@ -705,7 +741,7 @@ fn main() {
#[test]
fn associated_trait_function() {
- check_assist(
+ check_assist_by_label(
auto_import,
r"
mod test_mod {
@@ -739,6 +775,44 @@ fn main() {
test_mod::TestStruct::test_function
}
",
+ "Import `test_mod::TestTrait`",
+ );
+
+ check_assist_by_label(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::test_function$0
+ }
+ ",
+ r"
+ use test_mod::TestTrait as _;
+
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::test_function
+ }
+ ",
+ "Import `test_mod::TestTrait as _`",
);
}
@@ -776,7 +850,44 @@ fn main() {
#[test]
fn associated_trait_const() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::TEST_CONST$0
+ }
+ ",
+ r"
+ use test_mod::TestTrait as _;
+
+ mod test_mod {
+ pub trait TestTrait {
+ const TEST_CONST: u8;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const TEST_CONST: u8 = 42;
+ }
+ }
+
+ fn main() {
+ test_mod::TestStruct::TEST_CONST
+ }
+ ",
+ "Import `test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
mod test_mod {
@@ -810,6 +921,7 @@ fn main() {
test_mod::TestStruct::TEST_CONST
}
",
+ "Import `test_mod::TestTrait`",
);
}
@@ -847,7 +959,46 @@ fn main() {
#[test]
fn trait_method() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+
+ fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ ",
+ r"
+ use test_mod::TestTrait as _;
+
+ mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+
+ fn main() {
+ let test_struct = test_mod::TestStruct {};
+ test_struct.test_method()
+ }
+ ",
+ "Import `test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
mod test_mod {
@@ -883,12 +1034,43 @@ fn main() {
test_struct.test_method()
}
",
+ "Import `test_mod::TestTrait`",
);
}
#[test]
fn trait_method_cross_crate() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_meth$0od()
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_method(&self);
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_method(&self) {}
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait as _;
+
+ fn main() {
+ let test_struct = dep::test_mod::TestStruct {};
+ test_struct.test_method()
+ }
+ ",
+ "Import `dep::test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@@ -915,12 +1097,41 @@ fn main() {
test_struct.test_method()
}
",
+ "Import `dep::test_mod::TestTrait`",
);
}
#[test]
fn assoc_fn_cross_crate() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::test_func$0tion
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ fn test_function();
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ fn test_function() {}
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait as _;
+
+ fn main() {
+ dep::test_mod::TestStruct::test_function
+ }
+ ",
+ "Import `dep::test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@@ -945,12 +1156,41 @@ fn main() {
dep::test_mod::TestStruct::test_function
}
",
+ "Import `dep::test_mod::TestTrait`",
);
}
#[test]
fn assoc_const_cross_crate() {
- check_assist(
+ check_assist_by_label(
+ auto_import,
+ r"
+ //- /main.rs crate:main deps:dep
+ fn main() {
+ dep::test_mod::TestStruct::CONST$0
+ }
+ //- /dep.rs crate:dep
+ pub mod test_mod {
+ pub trait TestTrait {
+ const CONST: bool;
+ }
+ pub struct TestStruct {}
+ impl TestTrait for TestStruct {
+ const CONST: bool = true;
+ }
+ }
+ ",
+ r"
+ use dep::test_mod::TestTrait as _;
+
+ fn main() {
+ dep::test_mod::TestStruct::CONST
+ }
+ ",
+ "Import `dep::test_mod::TestTrait as _`",
+ );
+
+ check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@@ -975,6 +1215,7 @@ fn main() {
dep::test_mod::TestStruct::CONST
}
",
+ "Import `dep::test_mod::TestTrait`",
);
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs
new file mode 100644
index 000000000..0f2d1057c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs
@@ -0,0 +1,1675 @@
+use hir::ModuleDef;
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ defs::Definition,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+ search::{FileReference, UsageSearchResult},
+ source_change::SourceChangeBuilder,
+ FxHashSet,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{
+ self,
+ edit::IndentLevel,
+ edit_in_place::{AttrsOwnerEdit, Indent},
+ make, HasName,
+ },
+ ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
+};
+use text_edit::TextRange;
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: bool_to_enum
+//
+// This converts boolean local variables, fields, constants, and statics into a new
+// enum with two variants `Bool::True` and `Bool::False`, as well as replacing
+// all assignments with the variants and replacing all usages with `== Bool::True` or
+// `== Bool::False`.
+//
+// ```
+// fn main() {
+// let $0bool = true;
+//
+// if bool {
+// println!("foo");
+// }
+// }
+// ```
+// ->
+// ```
+// #[derive(PartialEq, Eq)]
+// enum Bool { True, False }
+//
+// fn main() {
+// let bool = Bool::True;
+//
+// if bool == Bool::True {
+// println!("foo");
+// }
+// }
+// ```
+/// Assist entry point — see the `// Assist: bool_to_enum` block above for a
+/// worked before/after example.
+pub(crate) fn bool_to_enum(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let BoolNodeData { target_node, name, ty_annotation, initializer, definition } =
+        find_bool_node(ctx)?;
+    let target_module = ctx.sema.scope(&target_node)?.module().nearest_non_block_module(ctx.db());
+
+    let target = name.syntax().text_range();
+    acc.add(
+        AssistId("bool_to_enum", AssistKind::RefactorRewrite),
+        "Convert boolean to enum",
+        target,
+        |edit| {
+            // An explicit `bool` annotation becomes the new `Bool` enum type.
+            if let Some(ty) = &ty_annotation {
+                cov_mark::hit!(replaces_ty_annotation);
+                edit.replace(ty.syntax().text_range(), "Bool");
+            }
+
+            // Rewrite the initializer (if any) to the enum equivalent.
+            if let Some(initializer) = initializer {
+                replace_bool_expr(edit, initializer);
+            }
+
+            // Emit the enum definition, then patch every usage site.
+            let usages = definition.usages(&ctx.sema).all();
+            add_enum_def(edit, ctx, &usages, target_node, &target_module);
+            replace_usages(edit, ctx, &usages, definition, &target_module);
+        },
+    )
+}
+
+/// Everything needed to apply the assist to one boolean "thing"
+/// (local binding, const, static, or record field).
+struct BoolNodeData {
+    /// Node the new enum definition is inserted before (via `add_enum_def`).
+    target_node: SyntaxNode,
+    /// The identifier under the cursor; its range is the assist target.
+    name: ast::Name,
+    /// Explicit `bool` type annotation to rewrite, when present.
+    ty_annotation: Option<ast::Type>,
+    /// Initializer/body expression to rewrite, when present.
+    initializer: Option<ast::Expr>,
+    /// Semantic definition whose usages get replaced.
+    definition: Definition,
+}
+
+/// Attempts to find an appropriate node to apply the action to.
+///
+/// Recognizes, in order: a `let` binding, a `const`, a `static`, and a
+/// record field — in every case only when the bound type is `bool`.
+fn find_bool_node(ctx: &AssistContext<'_>) -> Option<BoolNodeData> {
+    let name: ast::Name = ctx.find_node_at_offset()?;
+
+    if let Some(let_stmt) = name.syntax().ancestors().find_map(ast::LetStmt::cast) {
+        // Only simple `let ident = ...;` bindings are supported, not
+        // tuple/struct patterns.
+        let bind_pat = match let_stmt.pat()? {
+            ast::Pat::IdentPat(pat) => pat,
+            _ => {
+                cov_mark::hit!(not_applicable_in_non_ident_pat);
+                return None;
+            }
+        };
+        let def = ctx.sema.to_def(&bind_pat)?;
+        if !def.ty(ctx.db()).is_bool() {
+            cov_mark::hit!(not_applicable_non_bool_local);
+            return None;
+        }
+
+        Some(BoolNodeData {
+            target_node: let_stmt.syntax().clone(),
+            name,
+            ty_annotation: let_stmt.ty(),
+            initializer: let_stmt.initializer(),
+            definition: Definition::Local(def),
+        })
+    } else if let Some(const_) = name.syntax().parent().and_then(ast::Const::cast) {
+        let def = ctx.sema.to_def(&const_)?;
+        if !def.ty(ctx.db()).is_bool() {
+            cov_mark::hit!(not_applicable_non_bool_const);
+            return None;
+        }
+
+        Some(BoolNodeData {
+            target_node: const_.syntax().clone(),
+            name,
+            ty_annotation: const_.ty(),
+            initializer: const_.body(),
+            definition: Definition::Const(def),
+        })
+    } else if let Some(static_) = name.syntax().parent().and_then(ast::Static::cast) {
+        let def = ctx.sema.to_def(&static_)?;
+        if !def.ty(ctx.db()).is_bool() {
+            cov_mark::hit!(not_applicable_non_bool_static);
+            return None;
+        }
+
+        Some(BoolNodeData {
+            target_node: static_.syntax().clone(),
+            name,
+            ty_annotation: static_.ty(),
+            initializer: static_.body(),
+            definition: Definition::Static(def),
+        })
+    } else {
+        // Record field: the cursor must sit on the field's own name.
+        let field = name.syntax().parent().and_then(ast::RecordField::cast)?;
+        if field.name()? != name {
+            return None;
+        }
+
+        // Anchor the enum at the enclosing ADT rather than the field itself.
+        let adt = field.syntax().ancestors().find_map(ast::Adt::cast)?;
+        let def = ctx.sema.to_def(&field)?;
+        if !def.ty(ctx.db()).is_bool() {
+            cov_mark::hit!(not_applicable_non_bool_field);
+            return None;
+        }
+        Some(BoolNodeData {
+            target_node: adt.syntax().clone(),
+            name,
+            ty_annotation: field.ty(),
+            initializer: None,
+            definition: Definition::Field(def),
+        })
+    }
+}
+
+/// Replaces a `bool`-typed expression with its `Bool` enum equivalent.
+fn replace_bool_expr(edit: &mut SourceChangeBuilder, expr: ast::Expr) {
+    // Capture the range first: `bool_expr_to_enum_expr` consumes `expr`.
+    let expr_range = expr.syntax().text_range();
+    let enum_expr = bool_expr_to_enum_expr(expr);
+    edit.replace(expr_range, enum_expr.syntax().text())
+}
+
+/// Converts an expression of type `bool` to one of the new enum type.
+///
+/// `true`/`false` literals map directly to `Bool::True`/`Bool::False`;
+/// any other expression is wrapped as
+/// `if <expr> { Bool::True } else { Bool::False }`.
+fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
+    let true_expr = make::expr_path(make::path_from_text("Bool::True")).clone_for_update();
+    let false_expr = make::expr_path(make::path_from_text("Bool::False")).clone_for_update();
+
+    if let ast::Expr::Literal(literal) = &expr {
+        match literal.kind() {
+            ast::LiteralKind::Bool(true) => true_expr,
+            ast::LiteralKind::Bool(false) => false_expr,
+            // Non-bool literal: should not normally occur; leave unchanged.
+            _ => expr,
+        }
+    } else {
+        make::expr_if(
+            expr,
+            make::tail_only_block_expr(true_expr),
+            Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))),
+        )
+        .clone_for_update()
+    }
+}
+
+/// Replaces all usages of the target identifier, both when read and written to.
+///
+/// Works file by file; each reference is classified by the `find_*` helpers
+/// below and rewritten accordingly. Shorthand/ident patterns introduce new
+/// locals, whose usages are rewritten by recursing into this function.
+fn replace_usages(
+    edit: &mut SourceChangeBuilder,
+    ctx: &AssistContext<'_>,
+    usages: &UsageSearchResult,
+    target_definition: Definition,
+    target_module: &hir::Module,
+) {
+    for (file_id, references) in usages.iter() {
+        edit.edit_file(*file_id);
+
+        let refs_with_imports =
+            augment_references_with_imports(edit, ctx, references, target_module);
+
+        // NOTE(review): references are visited in reverse order, presumably so
+        // earlier text replacements don't shift the ranges of later ones —
+        // confirm before relying on the ordering.
+        refs_with_imports.into_iter().rev().for_each(
+            |FileReferenceWithImport { range, old_name, new_name, import_data }| {
+                // replace the usages in patterns and expressions
+                if let Some(ident_pat) = old_name.syntax().ancestors().find_map(ast::IdentPat::cast)
+                {
+                    cov_mark::hit!(replaces_record_pat_shorthand);
+
+                    // An ident pattern binds a fresh local; recurse to
+                    // rewrite that local's own usages.
+                    let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local);
+                    if let Some(def) = definition {
+                        replace_usages(
+                            edit,
+                            ctx,
+                            &def.usages(&ctx.sema).all(),
+                            target_definition,
+                            target_module,
+                        )
+                    }
+                } else if let Some(initializer) = find_assignment_usage(&new_name) {
+                    cov_mark::hit!(replaces_assignment);
+
+                    // `name = <expr>;` — rewrite the right-hand side.
+                    replace_bool_expr(edit, initializer);
+                } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&new_name) {
+                    cov_mark::hit!(replaces_negation);
+
+                    // `!name` becomes `name == Bool::False`.
+                    edit.replace(
+                        prefix_expr.syntax().text_range(),
+                        format!("{} == Bool::False", inner_expr),
+                    );
+                } else if let Some((record_field, initializer)) = old_name
+                    .as_name_ref()
+                    .and_then(ast::RecordExprField::for_field_name)
+                    .and_then(|record_field| ctx.sema.resolve_record_field(&record_field))
+                    .and_then(|(got_field, _, _)| {
+                        find_record_expr_usage(&new_name, got_field, target_definition)
+                    })
+                {
+                    cov_mark::hit!(replaces_record_expr);
+
+                    // `Foo { name: <expr> }` — rewrite the field initializer.
+                    let record_field = edit.make_mut(record_field);
+                    let enum_expr = bool_expr_to_enum_expr(initializer);
+                    record_field.replace_expr(enum_expr);
+                } else if let Some(pat) = find_record_pat_field_usage(&old_name) {
+                    match pat {
+                        ast::Pat::IdentPat(ident_pat) => {
+                            cov_mark::hit!(replaces_record_pat);
+
+                            // `Foo { name: binding }` — the binding is a new
+                            // local; recurse into its usages.
+                            let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local);
+                            if let Some(def) = definition {
+                                replace_usages(
+                                    edit,
+                                    ctx,
+                                    &def.usages(&ctx.sema).all(),
+                                    target_definition,
+                                    target_module,
+                                )
+                            }
+                        }
+                        ast::Pat::LiteralPat(literal_pat) => {
+                            cov_mark::hit!(replaces_literal_pat);
+
+                            // `Foo { name: true }` — rewrite the literal.
+                            if let Some(expr) = literal_pat.literal().and_then(|literal| {
+                                literal.syntax().ancestors().find_map(ast::Expr::cast)
+                            }) {
+                                replace_bool_expr(edit, expr);
+                            }
+                        }
+                        _ => (),
+                    }
+                } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&new_name)
+                {
+                    // Associated-const redefinition in an impl: retype + rewrite.
+                    edit.replace(ty_annotation.syntax().text_range(), "Bool");
+                    replace_bool_expr(edit, initializer);
+                } else if let Some(receiver) = find_method_call_expr_usage(&new_name) {
+                    // `name.method()` — parenthesize the comparison so it
+                    // stays the receiver of the call.
+                    edit.replace(
+                        receiver.syntax().text_range(),
+                        format!("({} == Bool::True)", receiver),
+                    );
+                } else if new_name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() {
+                    // for any other usage in an expression, replace it with a check that it is the true variant
+                    if let Some((record_field, expr)) = new_name
+                        .as_name_ref()
+                        .and_then(ast::RecordExprField::for_field_name)
+                        .and_then(|record_field| {
+                            record_field.expr().map(|expr| (record_field, expr))
+                        })
+                    {
+                        record_field.replace_expr(
+                            make::expr_bin_op(
+                                expr,
+                                ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false }),
+                                make::expr_path(make::path_from_text("Bool::True")),
+                            )
+                            .clone_for_update(),
+                        );
+                    } else {
+                        edit.replace(range, format!("{} == Bool::True", new_name.text()));
+                    }
+                }
+
+                // add imports across modules where needed
+                if let Some((import_scope, path)) = import_data {
+                    insert_use(&import_scope, path, &ctx.config.insert_use);
+                }
+            },
+        )
+    }
+}
+
+/// A single reference to the converted definition, plus the import (if any)
+/// that its module needs in order to see the new `Bool` enum.
+struct FileReferenceWithImport {
+    /// Text range of the reference in its file.
+    range: TextRange,
+    /// The name node as found in the original tree.
+    old_name: ast::NameLike,
+    /// Mutable copy of the name node (registered with the edit builder).
+    new_name: ast::NameLike,
+    /// Scope and path for an import to insert, when one is needed.
+    import_data: Option<(ImportScope, ast::Path)>,
+}
+
+/// Pairs every reference with a mutable copy of its name node and, where the
+/// reference lives in a different (non-block) module than the target, the
+/// import that must be added there.
+fn augment_references_with_imports(
+    edit: &mut SourceChangeBuilder,
+    ctx: &AssistContext<'_>,
+    references: &[FileReference],
+    target_module: &hir::Module,
+) -> Vec<FileReferenceWithImport> {
+    // Only one import is needed per referencing module.
+    let mut visited_modules = FxHashSet::default();
+
+    references
+        .iter()
+        .filter_map(|FileReference { range, name, .. }| {
+            let name = name.clone().into_name_like()?;
+            ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module()))
+        })
+        .map(|(range, name, ref_module)| {
+            let old_name = name.clone();
+            let new_name = edit.make_mut(name.clone());
+
+            // if the referenced module is not the same as the target one and has not been seen before, add an import
+            let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
+                && !visited_modules.contains(&ref_module)
+            {
+                visited_modules.insert(ref_module);
+
+                let import_scope =
+                    ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
+                // Path to the target module, with `Bool` appended as the item.
+                let path = ref_module
+                    .find_use_path_prefixed(
+                        ctx.sema.db,
+                        ModuleDef::Module(*target_module),
+                        ctx.config.insert_use.prefix_kind,
+                        ctx.config.prefer_no_std,
+                        ctx.config.prefer_prelude,
+                    )
+                    .map(|mod_path| {
+                        make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool"))
+                    });
+
+                import_scope.zip(path)
+            } else {
+                None
+            };
+
+            FileReferenceWithImport { range, old_name, new_name, import_data }
+        })
+        .collect()
+}
+
+/// Returns the right-hand side of a plain `name = <expr>` assignment,
+/// but only when `name` occurs on the left-hand side.
+fn find_assignment_usage(name: &ast::NameLike) -> Option<ast::Expr> {
+    let bin_expr = name.syntax().ancestors().find_map(ast::BinExpr::cast)?;
+
+    // The reference must be part of the assignment target, not the value.
+    if !bin_expr.lhs()?.syntax().descendants().contains(name.syntax()) {
+        cov_mark::hit!(dont_assign_incorrect_ref);
+        return None;
+    }
+
+    // `op: None` means a plain `=`, not a compound assignment like `+=`.
+    if let Some(ast::BinaryOp::Assignment { op: None }) = bin_expr.op_kind() {
+        bin_expr.rhs()
+    } else {
+        None
+    }
+}
+
+/// Matches a negated usage such as `!name` or `!expr.field`, returning the
+/// prefix expression together with its operand.
+fn find_negated_usage(name: &ast::NameLike) -> Option<(ast::PrefixExpr, ast::Expr)> {
+    let prefix_expr = name.syntax().ancestors().find_map(ast::PrefixExpr::cast)?;
+
+    // Only rewrite direct path/field operands; larger expressions inside a
+    // negation are left for the generic replacement path.
+    if !matches!(prefix_expr.expr()?, ast::Expr::PathExpr(_) | ast::Expr::FieldExpr(_)) {
+        cov_mark::hit!(dont_overwrite_expression_inside_negation);
+        return None;
+    }
+
+    if let Some(ast::UnaryOp::Not) = prefix_expr.op_kind() {
+        let inner_expr = prefix_expr.expr()?;
+        Some((prefix_expr, inner_expr))
+    } else {
+        None
+    }
+}
+
+/// Matches `Struct { name: <expr>, .. }`, returning the record field and its
+/// initializer expression.
+fn find_record_expr_usage(
+    name: &ast::NameLike,
+    got_field: hir::Field,
+    target_definition: Definition,
+) -> Option<(ast::RecordExprField, ast::Expr)> {
+    let name_ref = name.as_name_ref()?;
+    let record_field = ast::RecordExprField::for_field_name(name_ref)?;
+    let initializer = record_field.expr()?;
+
+    // When converting a field, require that this record field resolves to
+    // exactly that field, not an unrelated one with the same name.
+    if let Definition::Field(expected_field) = target_definition {
+        if got_field != expected_field {
+            return None;
+        }
+    }
+
+    Some((record_field, initializer))
+}
+
+/// Matches a usage inside a record pattern, e.g. `Foo { name: <pat> }`,
+/// returning the sub-pattern for the pattern kinds the assist can handle.
+fn find_record_pat_field_usage(name: &ast::NameLike) -> Option<ast::Pat> {
+    let record_pat_field = name.syntax().parent().and_then(ast::RecordPatField::cast)?;
+    let pat = record_pat_field.pat()?;
+
+    match pat {
+        ast::Pat::IdentPat(_) | ast::Pat::LiteralPat(_) | ast::Pat::WildcardPat(_) => Some(pat),
+        _ => None,
+    }
+}
+
+/// Matches a redefinition of the constant as an associated const (a direct
+/// child of an `AssocItemList`), returning its type annotation and body.
+fn find_assoc_const_usage(name: &ast::NameLike) -> Option<(ast::Type, ast::Expr)> {
+    let const_ = name.syntax().parent().and_then(ast::Const::cast)?;
+    if const_.syntax().parent().and_then(ast::AssocItemList::cast).is_none() {
+        return None;
+    }
+
+    Some((const_.ty()?, const_.body()?))
+}
+
+/// Matches `name.some_method(..)`, returning the receiver so the caller can
+/// replace it with a parenthesized `== Bool::True` comparison.
+fn find_method_call_expr_usage(name: &ast::NameLike) -> Option<ast::Expr> {
+    let method_call = name.syntax().ancestors().find_map(ast::MethodCallExpr::cast)?;
+    let receiver = method_call.receiver()?;
+
+    // The reference must be inside the receiver, not in the arguments.
+    if !receiver.syntax().descendants().contains(name.syntax()) {
+        return None;
+    }
+
+    Some(receiver)
+}
+
+/// Adds the definition of the new enum before the target node.
+fn add_enum_def(
+    edit: &mut SourceChangeBuilder,
+    ctx: &AssistContext<'_>,
+    usages: &UsageSearchResult,
+    target_node: SyntaxNode,
+    target_module: &hir::Module,
+) {
+    // The enum must be `pub` when any usage lives outside the target's
+    // nearest non-block module.
+    let make_enum_pub = usages
+        .iter()
+        .flat_map(|(_, refs)| refs)
+        .filter_map(|FileReference { name, .. }| {
+            let name = name.clone().into_name_like()?;
+            ctx.sema.scope(name.syntax()).map(|scope| scope.module())
+        })
+        .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
+    let enum_def = make_bool_enum(make_enum_pub);
+
+    // Insert at the nearest enclosing item boundary, matching its indentation.
+    let insert_before = node_to_insert_before(target_node);
+    let indent = IndentLevel::from_node(&insert_before);
+    enum_def.reindent_to(indent);
+
+    ted::insert_all(
+        ted::Position::before(&edit.make_syntax_mut(insert_before)),
+        vec![
+            enum_def.syntax().clone().into(),
+            make::tokens::whitespace(&format!("\n\n{indent}")).into(),
+        ],
+    );
+}
+
+/// Finds where to put the new enum definition.
+/// Tries to find the ast node at the nearest module or at top-level, otherwise just
+/// returns the input node.
+fn node_to_insert_before(target_node: SyntaxNode) -> SyntaxNode {
+    target_node
+        .ancestors()
+        // Walk up, but never past the enclosing module/file boundary...
+        .take_while(|it| !matches!(it.kind(), SyntaxKind::MODULE | SyntaxKind::SOURCE_FILE))
+        // ...and keep the outermost item found below that boundary.
+        .filter(|it| ast::Item::can_cast(it.kind()))
+        .last()
+        .unwrap_or(target_node)
+}
+
+/// Builds the `Bool` enum item (optionally `pub`) with a
+/// `#[derive(PartialEq, Eq)]` attribute, so the `== Bool::True` /
+/// `== Bool::False` comparisons inserted by `replace_usages` compile.
+fn make_bool_enum(make_pub: bool) -> ast::Enum {
+    let enum_def = make::enum_(
+        if make_pub { Some(make::visibility_pub()) } else { None },
+        make::name("Bool"),
+        make::variant_list(vec![
+            make::variant(make::name("True"), None),
+            make::variant(make::name("False"), None),
+        ]),
+    )
+    .clone_for_update();
+
+    // Hand-build `#[derive(PartialEq, Eq)]` as a token tree.
+    let derive_eq = make::attr_outer(make::meta_token_tree(
+        make::ext::ident_path("derive"),
+        make::token_tree(
+            T!['('],
+            vec![
+                NodeOrToken::Token(make::tokens::ident("PartialEq")),
+                NodeOrToken::Token(make::token(T![,])),
+                NodeOrToken::Token(make::tokens::single_space()),
+                NodeOrToken::Token(make::tokens::ident("Eq")),
+            ],
+        ),
+    ))
+    .clone_for_update();
+    enum_def.add_attr(derive_eq);
+
+    enum_def
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn local_variable_with_usage() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo = true;
+
+ if foo {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo = Bool::True;
+
+ if foo == Bool::True {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_with_usage_negated() {
+ cov_mark::check!(replaces_negation);
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo = true;
+
+ if !foo {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo = Bool::True;
+
+ if foo == Bool::False {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_with_type_annotation() {
+ cov_mark::check!(replaces_ty_annotation);
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo: bool = false;
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo: Bool = Bool::False;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_with_non_literal_initializer() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo = 1 == 2;
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo = if 1 == 2 { Bool::True } else { Bool::False };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_binexpr_usage() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo = false;
+ let bar = true;
+
+ if !foo && bar {
+ println!("foobar");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo = Bool::False;
+ let bar = true;
+
+ if foo == Bool::False && bar {
+ println!("foobar");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_unop_usage() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo = true;
+
+ if *&foo {
+ println!("foobar");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo = Bool::True;
+
+ if *&foo == Bool::True {
+ println!("foobar");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_assigned_later() {
+ cov_mark::check!(replaces_assignment);
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo: bool;
+ foo = true;
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo: Bool;
+ foo = Bool::True;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_does_not_apply_recursively() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo = true;
+ let bar = !foo;
+
+ if bar {
+ println!("bar");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo = Bool::True;
+ let bar = foo == Bool::False;
+
+ if bar {
+ println!("bar");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_nested_in_negation() {
+ cov_mark::check!(dont_overwrite_expression_inside_negation);
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ if !"foo".chars().any(|c| {
+ let $0foo = true;
+ foo
+ }) {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ if !"foo".chars().any(|c| {
+ let foo = Bool::True;
+ foo == Bool::True
+ }) {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_non_bool() {
+ cov_mark::check!(not_applicable_non_bool_local);
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+fn main() {
+ let $0foo = 1;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_cursor_not_on_ident() {
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+fn main() {
+ let foo = $0true;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_variable_non_ident_pat() {
+ cov_mark::check!(not_applicable_in_non_ident_pat);
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+fn main() {
+ let ($0foo, bar) = (true, false);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_struct_basic() {
+ cov_mark::check!(replaces_record_expr);
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bar: bool,
+ baz: bool,
+}
+
+fn main() {
+ let foo = Foo { bar: true, baz: false };
+
+ if foo.bar {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ bar: Bool,
+ baz: bool,
+}
+
+fn main() {
+ let foo = Foo { bar: Bool::True, baz: false };
+
+ if foo.bar == Bool::True {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_enum_basic() {
+ cov_mark::check!(replaces_record_pat);
+ check_assist(
+ bool_to_enum,
+ r#"
+enum Foo {
+ Foo,
+ Bar { $0bar: bool },
+}
+
+fn main() {
+ let foo = Foo::Bar { bar: true };
+
+ if let Foo::Bar { bar: baz } = foo {
+ if baz {
+ println!("foo");
+ }
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+enum Foo {
+ Foo,
+ Bar { bar: Bool },
+}
+
+fn main() {
+ let foo = Foo::Bar { bar: Bool::True };
+
+ if let Foo::Bar { bar: baz } = foo {
+ if baz == Bool::True {
+ println!("foo");
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_enum_cross_file() {
+ check_assist(
+ bool_to_enum,
+ r#"
+//- /foo.rs
+pub enum Foo {
+ Foo,
+ Bar { $0bar: bool },
+}
+
+fn foo() {
+ let foo = Foo::Bar { bar: true };
+}
+
+//- /main.rs
+use foo::Foo;
+
+mod foo;
+
+fn main() {
+ let foo = Foo::Bar { bar: false };
+}
+"#,
+ r#"
+//- /foo.rs
+#[derive(PartialEq, Eq)]
+pub enum Bool { True, False }
+
+pub enum Foo {
+ Foo,
+ Bar { bar: Bool },
+}
+
+fn foo() {
+ let foo = Foo::Bar { bar: Bool::True };
+}
+
+//- /main.rs
+use foo::{Foo, Bool};
+
+mod foo;
+
+fn main() {
+ let foo = Foo::Bar { bar: Bool::False };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_enum_shorthand() {
+ cov_mark::check!(replaces_record_pat_shorthand);
+ check_assist(
+ bool_to_enum,
+ r#"
+enum Foo {
+ Foo,
+ Bar { $0bar: bool },
+}
+
+fn main() {
+ let foo = Foo::Bar { bar: true };
+
+ match foo {
+ Foo::Bar { bar } => {
+ if bar {
+ println!("foo");
+ }
+ }
+ _ => (),
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+enum Foo {
+ Foo,
+ Bar { bar: Bool },
+}
+
+fn main() {
+ let foo = Foo::Bar { bar: Bool::True };
+
+ match foo {
+ Foo::Bar { bar } => {
+ if bar == Bool::True {
+ println!("foo");
+ }
+ }
+ _ => (),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_enum_replaces_literal_patterns() {
+ cov_mark::check!(replaces_literal_pat);
+ check_assist(
+ bool_to_enum,
+ r#"
+enum Foo {
+ Foo,
+ Bar { $0bar: bool },
+}
+
+fn main() {
+ let foo = Foo::Bar { bar: true };
+
+ if let Foo::Bar { bar: true } = foo {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+enum Foo {
+ Foo,
+ Bar { bar: Bool },
+}
+
+fn main() {
+ let foo = Foo::Bar { bar: Bool::True };
+
+ if let Foo::Bar { bar: Bool::True } = foo {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_enum_keeps_wildcard_patterns() {
+ check_assist(
+ bool_to_enum,
+ r#"
+enum Foo {
+ Foo,
+ Bar { $0bar: bool },
+}
+
+fn main() {
+ let foo = Foo::Bar { bar: true };
+
+ if let Foo::Bar { bar: _ } = foo {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+enum Foo {
+ Foo,
+ Bar { bar: Bool },
+}
+
+fn main() {
+ let foo = Foo::Bar { bar: Bool::True };
+
+ if let Foo::Bar { bar: _ } = foo {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_union_basic() {
+ check_assist(
+ bool_to_enum,
+ r#"
+union Foo {
+ $0foo: bool,
+ bar: usize,
+}
+
+fn main() {
+ let foo = Foo { foo: true };
+
+ if unsafe { foo.foo } {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+union Foo {
+ foo: Bool,
+ bar: usize,
+}
+
+fn main() {
+ let foo = Foo { foo: Bool::True };
+
+ if unsafe { foo.foo == Bool::True } {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_negated() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bar: bool,
+}
+
+fn main() {
+ let foo = Foo { bar: false };
+
+ if !foo.bar {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ bar: Bool,
+}
+
+fn main() {
+ let foo = Foo { bar: Bool::False };
+
+ if foo.bar == Bool::False {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_in_mod_properly_indented() {
+ check_assist(
+ bool_to_enum,
+ r#"
+mod foo {
+ struct Bar {
+ $0baz: bool,
+ }
+
+ impl Bar {
+ fn new(baz: bool) -> Self {
+ Self { baz }
+ }
+ }
+}
+"#,
+ r#"
+mod foo {
+ #[derive(PartialEq, Eq)]
+ enum Bool { True, False }
+
+ struct Bar {
+ baz: Bool,
+ }
+
+ impl Bar {
+ fn new(baz: bool) -> Self {
+ Self { baz: if baz { Bool::True } else { Bool::False } }
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_multiple_initializations() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bar: bool,
+ baz: bool,
+}
+
+fn main() {
+ let foo1 = Foo { bar: true, baz: false };
+ let foo2 = Foo { bar: false, baz: false };
+
+ if foo1.bar && foo2.bar {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ bar: Bool,
+ baz: bool,
+}
+
+fn main() {
+ let foo1 = Foo { bar: Bool::True, baz: false };
+ let foo2 = Foo { bar: Bool::False, baz: false };
+
+ if foo1.bar == Bool::True && foo2.bar == Bool::True {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_assigned_to_another() {
+ cov_mark::check!(dont_assign_incorrect_ref);
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0foo: bool,
+}
+
+struct Bar {
+ bar: bool,
+}
+
+fn main() {
+ let foo = Foo { foo: true };
+ let mut bar = Bar { bar: true };
+
+ bar.bar = foo.foo;
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ foo: Bool,
+}
+
+struct Bar {
+ bar: bool,
+}
+
+fn main() {
+ let foo = Foo { foo: Bool::True };
+ let mut bar = Bar { bar: true };
+
+ bar.bar = foo.foo == Bool::True;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_initialized_with_other() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0foo: bool,
+}
+
+struct Bar {
+ bar: bool,
+}
+
+fn main() {
+ let foo = Foo { foo: true };
+ let bar = Bar { bar: foo.foo };
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ foo: Bool,
+}
+
+struct Bar {
+ bar: bool,
+}
+
+fn main() {
+ let foo = Foo { foo: Bool::True };
+ let bar = Bar { bar: foo.foo == Bool::True };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_method_chain_usage() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bool: bool,
+}
+
+fn main() {
+ let foo = Foo { bool: true };
+
+ foo.bool.then(|| 2);
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Foo {
+ bool: Bool,
+}
+
+fn main() {
+ let foo = Foo { bool: Bool::True };
+
+ (foo.bool == Bool::True).then(|| 2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_non_bool() {
+ cov_mark::check!(not_applicable_non_bool_field);
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+struct Foo {
+ $0bar: usize,
+}
+
+fn main() {
+ let foo = Foo { bar: 1 };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_basic() {
+ check_assist(
+ bool_to_enum,
+ r#"
+const $0FOO: bool = false;
+
+fn main() {
+ if FOO {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+const FOO: Bool = Bool::False;
+
+fn main() {
+ if FOO == Bool::True {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_in_module() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ if foo::FOO {
+ println!("foo");
+ }
+}
+
+mod foo {
+ pub const $0FOO: bool = true;
+}
+"#,
+ r#"
+use foo::Bool;
+
+fn main() {
+ if foo::FOO == Bool::True {
+ println!("foo");
+ }
+}
+
+mod foo {
+ #[derive(PartialEq, Eq)]
+ pub enum Bool { True, False }
+
+ pub const FOO: Bool = Bool::True;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_in_module_with_import() {
+ check_assist(
+ bool_to_enum,
+ r#"
+fn main() {
+ use foo::FOO;
+
+ if FOO {
+ println!("foo");
+ }
+}
+
+mod foo {
+ pub const $0FOO: bool = true;
+}
+"#,
+ r#"
+use crate::foo::Bool;
+
+fn main() {
+ use foo::FOO;
+
+ if FOO == Bool::True {
+ println!("foo");
+ }
+}
+
+mod foo {
+ #[derive(PartialEq, Eq)]
+ pub enum Bool { True, False }
+
+ pub const FOO: Bool = Bool::True;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_cross_file() {
+ check_assist(
+ bool_to_enum,
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ if foo::FOO {
+ println!("foo");
+ }
+}
+
+//- /foo.rs
+pub const $0FOO: bool = true;
+"#,
+ r#"
+//- /main.rs
+use foo::Bool;
+
+mod foo;
+
+fn main() {
+ if foo::FOO == Bool::True {
+ println!("foo");
+ }
+}
+
+//- /foo.rs
+#[derive(PartialEq, Eq)]
+pub enum Bool { True, False }
+
+pub const FOO: Bool = Bool::True;
+"#,
+ )
+ }
+
+ #[test]
+ fn const_cross_file_and_module() {
+ check_assist(
+ bool_to_enum,
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ use foo::bar;
+
+ if bar::BAR {
+ println!("foo");
+ }
+}
+
+//- /foo.rs
+pub mod bar {
+ pub const $0BAR: bool = false;
+}
+"#,
+ r#"
+//- /main.rs
+use crate::foo::bar::Bool;
+
+mod foo;
+
+fn main() {
+ use foo::bar;
+
+ if bar::BAR == Bool::True {
+ println!("foo");
+ }
+}
+
+//- /foo.rs
+pub mod bar {
+ #[derive(PartialEq, Eq)]
+ pub enum Bool { True, False }
+
+ pub const BAR: Bool = Bool::False;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_in_impl_cross_file() {
+ check_assist(
+ bool_to_enum,
+ r#"
+//- /main.rs
+mod foo;
+
+struct Foo;
+
+impl Foo {
+ pub const $0BOOL: bool = true;
+}
+
+//- /foo.rs
+use crate::Foo;
+
+fn foo() -> bool {
+ Foo::BOOL
+}
+"#,
+ r#"
+//- /main.rs
+mod foo;
+
+struct Foo;
+
+#[derive(PartialEq, Eq)]
+pub enum Bool { True, False }
+
+impl Foo {
+ pub const BOOL: Bool = Bool::True;
+}
+
+//- /foo.rs
+use crate::{Foo, Bool};
+
+fn foo() -> bool {
+ Foo::BOOL == Bool::True
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_in_trait() {
+ check_assist(
+ bool_to_enum,
+ r#"
+trait Foo {
+ const $0BOOL: bool;
+}
+
+impl Foo for usize {
+ const BOOL: bool = true;
+}
+
+fn main() {
+ if <usize as Foo>::BOOL {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+trait Foo {
+ const BOOL: Bool;
+}
+
+impl Foo for usize {
+ const BOOL: Bool = Bool::True;
+}
+
+fn main() {
+ if <usize as Foo>::BOOL == Bool::True {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn const_non_bool() {
+ cov_mark::check!(not_applicable_non_bool_const);
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+const $0FOO: &str = "foo";
+
+fn main() {
+ println!("{FOO}");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn static_basic() {
+ check_assist(
+ bool_to_enum,
+ r#"
+static mut $0BOOL: bool = true;
+
+fn main() {
+ unsafe { BOOL = false };
+ if unsafe { BOOL } {
+ println!("foo");
+ }
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+static mut BOOL: Bool = Bool::True;
+
+fn main() {
+ unsafe { BOOL = Bool::False };
+ if unsafe { BOOL == Bool::True } {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn static_non_bool() {
+ cov_mark::check!(not_applicable_non_bool_static);
+ check_assist_not_applicable(
+ bool_to_enum,
+ r#"
+static mut $0FOO: usize = 0;
+
+fn main() {
+ if unsafe { FOO } == 0 {
+ println!("foo");
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn not_applicable_to_other_names() {
+ check_assist_not_applicable(bool_to_enum, "fn $0main() {}")
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
index 1acd5ee97..3f478ee7d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
@@ -25,9 +25,7 @@ pub(crate) fn convert_comment_block(acc: &mut Assists, ctx: &AssistContext<'_>)
let comment = ctx.find_token_at_offset::<ast::Comment>()?;
// Only allow comments which are alone on their line
if let Some(prev) = comment.syntax().prev_token() {
- if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
- return None;
- }
+ Whitespace::cast(prev).filter(|w| w.text().contains('\n'))?;
}
match comment.kind().shape {
@@ -78,7 +76,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
// Establish the target of our edit based on the comments we found
let target = TextRange::new(
comments[0].syntax().text_range().start(),
- comments.last().unwrap().syntax().text_range().end(),
+ comments.last()?.syntax().text_range().end(),
);
acc.add(
@@ -91,8 +89,12 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
// contents of each line comment when they're put into the block comment.
let indentation = IndentLevel::from_token(comment.syntax());
- let block_comment_body =
- comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");
+ let block_comment_body = comments
+ .into_iter()
+ .map(|c| line_comment_text(indentation, c))
+ .collect::<Vec<String>>()
+ .into_iter()
+ .join("\n");
let block_prefix =
CommentKind { shape: CommentShape::Block, ..comment.kind() }.prefix();
@@ -160,7 +162,8 @@ pub(crate) fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
//
// But since such comments aren't idiomatic we're okay with this.
pub(crate) fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
- let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap();
+ let text = comm.text();
+ let contents_without_prefix = text.strip_prefix(comm.prefix()).unwrap_or(text);
let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix);
// Don't add the indentation if the line is empty
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
index 872b52c98..d649f13d6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs
@@ -50,7 +50,12 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
_ => return None,
};
- mod_path_to_ast(&module.find_use_path(ctx.db(), src_type_def, ctx.config.prefer_no_std)?)
+ mod_path_to_ast(&module.find_use_path(
+ ctx.db(),
+ src_type_def,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )?)
};
let dest_type = match &ast_trait {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
index 7d0e42476..73ba3f5c4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs
@@ -51,22 +51,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
// Check if there is an IfLet that we can handle.
let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) {
let let_ = single_let(cond)?;
- match let_.pat() {
- Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => {
- let path = pat.path()?;
- if path.qualifier().is_some() {
- return None;
- }
-
- let bound_ident = pat.fields().next()?;
- if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) {
- return None;
- }
-
- (Some((path, bound_ident)), let_.expr()?)
- }
- _ => return None, // Unsupported IfLet.
- }
+ (Some(let_.pat()?), let_.expr()?)
} else {
(None, cond)
};
@@ -136,11 +121,10 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
};
new_expr.syntax().clone_for_update()
}
- Some((path, bound_ident)) => {
+ Some(pat) => {
// If-let.
- let pat = make::tuple_struct_pat(path, once(bound_ident));
let let_else_stmt = make::let_else_stmt(
- pat.into(),
+ pat,
None,
cond_expr,
ast::make::tail_only_block_expr(early_expression),
@@ -443,6 +427,60 @@ fn main() {
}
#[test]
+ fn convert_arbitrary_if_let_patterns() {
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ $0if let None = Some(92) {
+ foo();
+ }
+}
+"#,
+ r#"
+fn main() {
+ let None = Some(92) else { return };
+ foo();
+}
+"#,
+ );
+
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ $0if let [1, x] = [1, 92] {
+ foo(x);
+ }
+}
+"#,
+ r#"
+fn main() {
+ let [1, x] = [1, 92] else { return };
+ foo(x);
+}
+"#,
+ );
+
+ check_assist(
+ convert_to_guarded_return,
+ r#"
+fn main() {
+ $0if let (Some(x), None) = (Some(92), None) {
+ foo(x);
+ }
+}
+"#,
+ r#"
+fn main() {
+ let (Some(x), None) = (Some(92), None) else { return };
+ foo(x);
+}
+"#,
+ );
+ }
+
+ #[test]
fn ignore_already_converted_if() {
check_assist_not_applicable(
convert_to_guarded_return,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
new file mode 100644
index 000000000..79b46d661
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs
@@ -0,0 +1,889 @@
+use either::Either;
+use hir::ModuleDef;
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ defs::Definition,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+ search::{FileReference, UsageSearchResult},
+ source_change::SourceChangeBuilder,
+ syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
+ FxHashSet,
+};
+use syntax::{
+ ast::{self, edit::IndentLevel, edit_in_place::Indent, make, HasName},
+ match_ast, ted, AstNode, SyntaxNode,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: convert_tuple_return_type_to_struct
+//
+// This converts the return type of a function from a tuple type
+// into a tuple struct and updates the body accordingly.
+//
+// ```
+// fn bar() {
+// let (a, b, c) = foo();
+// }
+//
+// fn foo() -> ($0u32, u32, u32) {
+// (1, 2, 3)
+// }
+// ```
+// ->
+// ```
+// fn bar() {
+// let FooResult(a, b, c) = foo();
+// }
+//
+// struct FooResult(u32, u32, u32);
+//
+// fn foo() -> FooResult {
+// FooResult(1, 2, 3)
+// }
+// ```
+pub(crate) fn convert_tuple_return_type_to_struct(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
+ let type_ref = ret_type.ty()?;
+
+ let ast::Type::TupleType(tuple_ty) = &type_ref else { return None };
+ if tuple_ty.fields().any(|field| matches!(field, ast::Type::ImplTraitType(_))) {
+ return None;
+ }
+
+ let fn_ = ret_type.syntax().parent().and_then(ast::Fn::cast)?;
+ let fn_def = ctx.sema.to_def(&fn_)?;
+ let fn_name = fn_.name()?;
+ let target_module = ctx.sema.scope(fn_.syntax())?.module().nearest_non_block_module(ctx.db());
+
+ let target = type_ref.syntax().text_range();
+ acc.add(
+ AssistId("convert_tuple_return_type_to_struct", AssistKind::RefactorRewrite),
+ "Convert tuple return type to tuple struct",
+ target,
+ move |edit| {
+ let ret_type = edit.make_mut(ret_type);
+ let fn_ = edit.make_mut(fn_);
+
+ let usages = Definition::Function(fn_def).usages(&ctx.sema).all();
+ let struct_name = format!("{}Result", stdx::to_camel_case(&fn_name.to_string()));
+ let parent = fn_.syntax().ancestors().find_map(<Either<ast::Impl, ast::Trait>>::cast);
+ add_tuple_struct_def(
+ edit,
+ ctx,
+ &usages,
+ parent.as_ref().map(|it| it.syntax()).unwrap_or(fn_.syntax()),
+ tuple_ty,
+ &struct_name,
+ &target_module,
+ );
+
+ ted::replace(
+ ret_type.syntax(),
+ make::ret_type(make::ty(&struct_name)).syntax().clone_for_update(),
+ );
+
+ if let Some(fn_body) = fn_.body() {
+ replace_body_return_values(ast::Expr::BlockExpr(fn_body), &struct_name);
+ }
+
+ replace_usages(edit, ctx, &usages, &struct_name, &target_module);
+ },
+ )
+}
+
+/// Replaces tuple usages with the corresponding tuple struct pattern.
+fn replace_usages(
+ edit: &mut SourceChangeBuilder,
+ ctx: &AssistContext<'_>,
+ usages: &UsageSearchResult,
+ struct_name: &str,
+ target_module: &hir::Module,
+) {
+ for (file_id, references) in usages.iter() {
+ edit.edit_file(*file_id);
+
+ let refs_with_imports =
+ augment_references_with_imports(edit, ctx, references, struct_name, target_module);
+
+ refs_with_imports.into_iter().rev().for_each(|(name, import_data)| {
+ if let Some(fn_) = name.syntax().parent().and_then(ast::Fn::cast) {
+ cov_mark::hit!(replace_trait_impl_fns);
+
+ if let Some(ret_type) = fn_.ret_type() {
+ ted::replace(
+ ret_type.syntax(),
+ make::ret_type(make::ty(struct_name)).syntax().clone_for_update(),
+ );
+ }
+
+ if let Some(fn_body) = fn_.body() {
+ replace_body_return_values(ast::Expr::BlockExpr(fn_body), struct_name);
+ }
+ } else {
+ // replace tuple patterns
+ let pats = name
+ .syntax()
+ .ancestors()
+ .find(|node| {
+ ast::CallExpr::can_cast(node.kind())
+ || ast::MethodCallExpr::can_cast(node.kind())
+ })
+ .and_then(|node| node.parent())
+ .and_then(node_to_pats)
+ .unwrap_or(Vec::new());
+
+ let tuple_pats = pats.iter().filter_map(|pat| match pat {
+ ast::Pat::TuplePat(tuple_pat) => Some(tuple_pat),
+ _ => None,
+ });
+ for tuple_pat in tuple_pats {
+ ted::replace(
+ tuple_pat.syntax(),
+ make::tuple_struct_pat(
+ make::path_from_text(struct_name),
+ tuple_pat.fields(),
+ )
+ .clone_for_update()
+ .syntax(),
+ );
+ }
+ }
+ // add imports across modules where needed
+ if let Some((import_scope, path)) = import_data {
+ insert_use(&import_scope, path, &ctx.config.insert_use);
+ }
+ })
+ }
+}
+
+fn node_to_pats(node: SyntaxNode) -> Option<Vec<ast::Pat>> {
+ match_ast! {
+ match node {
+ ast::LetStmt(it) => it.pat().map(|pat| vec![pat]),
+ ast::LetExpr(it) => it.pat().map(|pat| vec![pat]),
+ ast::MatchExpr(it) => it.match_arm_list().map(|arm_list| {
+ arm_list.arms().filter_map(|arm| arm.pat()).collect()
+ }),
+ _ => None,
+ }
+ }
+}
+
+fn augment_references_with_imports(
+ edit: &mut SourceChangeBuilder,
+ ctx: &AssistContext<'_>,
+ references: &[FileReference],
+ struct_name: &str,
+ target_module: &hir::Module,
+) -> Vec<(ast::NameLike, Option<(ImportScope, ast::Path)>)> {
+ let mut visited_modules = FxHashSet::default();
+
+ references
+ .iter()
+ .filter_map(|FileReference { name, .. }| {
+ let name = name.clone().into_name_like()?;
+ ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
+ })
+ .map(|(name, ref_module)| {
+ let new_name = edit.make_mut(name.clone());
+
+ // if the referenced module is not the same as the target one and has not been seen before, add an import
+ let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
+ && !visited_modules.contains(&ref_module)
+ {
+ visited_modules.insert(ref_module);
+
+ let import_scope =
+ ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
+ let path = ref_module
+ .find_use_path_prefixed(
+ ctx.sema.db,
+ ModuleDef::Module(*target_module),
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
+ .map(|mod_path| {
+ make::path_concat(
+ mod_path_to_ast(&mod_path),
+ make::path_from_text(struct_name),
+ )
+ });
+
+ import_scope.zip(path)
+ } else {
+ None
+ };
+
+ (new_name, import_data)
+ })
+ .collect()
+}
+
+// Adds the definition of the tuple struct before the parent function.
+fn add_tuple_struct_def(
+ edit: &mut SourceChangeBuilder,
+ ctx: &AssistContext<'_>,
+ usages: &UsageSearchResult,
+ parent: &SyntaxNode,
+ tuple_ty: &ast::TupleType,
+ struct_name: &str,
+ target_module: &hir::Module,
+) {
+ let make_struct_pub = usages
+ .iter()
+ .flat_map(|(_, refs)| refs)
+ .filter_map(|FileReference { name, .. }| {
+ let name = name.clone().into_name_like()?;
+ ctx.sema.scope(name.syntax()).map(|scope| scope.module())
+ })
+ .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
+ let visibility = if make_struct_pub { Some(make::visibility_pub()) } else { None };
+
+ let field_list = ast::FieldList::TupleFieldList(make::tuple_field_list(
+ tuple_ty.fields().map(|ty| make::tuple_field(visibility.clone(), ty)),
+ ));
+ let struct_name = make::name(struct_name);
+ let struct_def = make::struct_(visibility, struct_name, None, field_list).clone_for_update();
+
+ let indent = IndentLevel::from_node(parent);
+ struct_def.reindent_to(indent);
+
+ edit.insert(parent.text_range().start(), format!("{struct_def}\n\n{indent}"));
+}
+
+/// Replaces each returned tuple in `body` with the constructor of the tuple struct named `struct_name`.
+fn replace_body_return_values(body: ast::Expr, struct_name: &str) {
+ let mut exprs_to_wrap = Vec::new();
+
+ let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
+ walk_expr(&body, &mut |expr| {
+ if let ast::Expr::ReturnExpr(ret_expr) = expr {
+ if let Some(ret_expr_arg) = &ret_expr.expr() {
+ for_each_tail_expr(ret_expr_arg, tail_cb);
+ }
+ }
+ });
+ for_each_tail_expr(&body, tail_cb);
+
+ for ret_expr in exprs_to_wrap {
+ if let ast::Expr::TupleExpr(tuple_expr) = &ret_expr {
+ let struct_constructor = make::expr_call(
+ make::expr_path(make::ext::ident_path(struct_name)),
+ make::arg_list(tuple_expr.fields()),
+ )
+ .clone_for_update();
+ ted::replace(ret_expr.syntax(), struct_constructor.syntax());
+ }
+ }
+}
+
+fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
+ match e {
+ ast::Expr::BreakExpr(break_expr) => {
+ if let Some(break_expr_arg) = break_expr.expr() {
+ for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
+ }
+ }
+ ast::Expr::ReturnExpr(_) => {
+ // all return expressions have already been handled by the walk loop
+ }
+ e => acc.push(e.clone()),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn function_basic() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(&'static str, bool) {
+ ("bar", true)
+}
+"#,
+ r#"
+struct BarResult(&'static str, bool);
+
+fn bar() -> BarResult {
+ BarResult("bar", true)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn struct_and_usages_indented() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+mod foo {
+ pub(crate) fn foo() {
+ let (bar, baz) = bar();
+ println!("{bar} {baz}");
+ }
+
+ pub(crate) fn bar() -> $0(usize, bool) {
+ (42, true)
+ }
+}
+"#,
+ r#"
+mod foo {
+ pub(crate) fn foo() {
+ let BarResult(bar, baz) = bar();
+ println!("{bar} {baz}");
+ }
+
+ struct BarResult(usize, bool);
+
+ pub(crate) fn bar() -> BarResult {
+ BarResult(42, true)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn field_usage() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ (42, true)
+}
+
+fn main() {
+ let bar_result = bar();
+ println!("{} {}", bar_result.1, bar().0);
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ BarResult(42, true)
+}
+
+fn main() {
+ let bar_result = bar();
+ println!("{} {}", bar_result.1, bar().0);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn method_usage() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+struct Foo;
+
+impl Foo {
+ fn foo(&self, x: usize) -> $0(usize, usize) {
+ (x, x)
+ }
+}
+
+fn main() {
+ let foo = Foo {};
+ let (x, y) = foo.foo(2);
+}
+"#,
+ r#"
+struct Foo;
+
+struct FooResult(usize, usize);
+
+impl Foo {
+ fn foo(&self, x: usize) -> FooResult {
+ FooResult(x, x)
+ }
+}
+
+fn main() {
+ let foo = Foo {};
+ let FooResult(x, y) = foo.foo(2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn method_usage_within_same_impl() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+struct Foo;
+
+impl Foo {
+ fn new() -> $0(usize, usize) {
+ (0, 0)
+ }
+
+ fn foo() {
+ let (mut foo1, mut foo2) = Self::new();
+ }
+}
+"#,
+ r#"
+struct Foo;
+
+struct NewResult(usize, usize);
+
+impl Foo {
+ fn new() -> NewResult {
+ NewResult(0, 0)
+ }
+
+ fn foo() {
+ let NewResult(mut foo1, mut foo2) = Self::new();
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn multiple_usages() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, usize) {
+ (42, 24)
+}
+
+fn main() {
+ let bar_result = bar();
+ let (foo, b) = bar();
+ let (b, baz) = bar();
+
+ if foo == b && b == baz {
+ println!("{} {}", bar_result.1, bar().0);
+ }
+}
+"#,
+ r#"
+struct BarResult(usize, usize);
+
+fn bar() -> BarResult {
+ BarResult(42, 24)
+}
+
+fn main() {
+ let bar_result = bar();
+ let BarResult(foo, b) = bar();
+ let BarResult(b, baz) = bar();
+
+ if foo == b && b == baz {
+ println!("{} {}", bar_result.1, bar().0);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn usage_match_tuple_pat() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ (42, true)
+}
+
+fn main() {
+ match bar() {
+ x if x.0 == 0 => println!("0"),
+ (x, false) => println!("{x}"),
+ (42, true) => println!("bar"),
+ _ => println!("foo"),
+ }
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ BarResult(42, true)
+}
+
+fn main() {
+ match bar() {
+ x if x.0 == 0 => println!("0"),
+ BarResult(x, false) => println!("{x}"),
+ BarResult(42, true) => println!("bar"),
+ _ => println!("foo"),
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn usage_if_let_tuple_pat() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ (42, true)
+}
+
+fn main() {
+ if let (42, true) = bar() {
+ println!("bar")
+ }
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ BarResult(42, true)
+}
+
+fn main() {
+ if let BarResult(42, true) = bar() {
+ println!("bar")
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn function_nested_outer() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ fn foo() -> (usize, bool) {
+ (42, true)
+ }
+
+ foo()
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ fn foo() -> (usize, bool) {
+ (42, true)
+ }
+
+ foo()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn function_nested_inner() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> (usize, bool) {
+ fn foo() -> $0(usize, bool) {
+ (42, true)
+ }
+
+ foo()
+}
+"#,
+ r#"
+fn bar() -> (usize, bool) {
+ struct FooResult(usize, bool);
+
+ fn foo() -> FooResult {
+ FooResult(42, true)
+ }
+
+ foo()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn trait_impl_and_usage() {
+ cov_mark::check!(replace_trait_impl_fns);
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+struct Struct;
+
+trait Foo {
+ fn foo(&self) -> $0(usize, bool);
+}
+
+impl Foo for Struct {
+ fn foo(&self) -> (usize, bool) {
+ (0, true)
+ }
+}
+
+fn main() {
+ let s = Struct {};
+ let (foo, bar) = s.foo();
+ let (foo, bar) = Struct::foo(&s);
+ println!("{foo} {bar}");
+}
+"#,
+ r#"
+struct Struct;
+
+struct FooResult(usize, bool);
+
+trait Foo {
+ fn foo(&self) -> FooResult;
+}
+
+impl Foo for Struct {
+ fn foo(&self) -> FooResult {
+ FooResult(0, true)
+ }
+}
+
+fn main() {
+ let s = Struct {};
+ let FooResult(foo, bar) = s.foo();
+ let FooResult(foo, bar) = Struct::foo(&s);
+ println!("{foo} {bar}");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn body_wraps_nested() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn foo() -> $0(u8, usize, u32) {
+ if true {
+ match 3 {
+ 0 => (1, 2, 3),
+ _ => return (4, 5, 6),
+ }
+ } else {
+ (2, 1, 3)
+ }
+}
+"#,
+ r#"
+struct FooResult(u8, usize, u32);
+
+fn foo() -> FooResult {
+ if true {
+ match 3 {
+ 0 => FooResult(1, 2, 3),
+ _ => return FooResult(4, 5, 6),
+ }
+ } else {
+ FooResult(2, 1, 3)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn body_wraps_break_and_return() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn foo(mut i: isize) -> (usize, $0u32, u8) {
+ if i < 0 {
+ return (0, 0, 0);
+ }
+
+ loop {
+ if i == 2 {
+ println!("foo");
+ break (1, 2, 3);
+ }
+ i += 1;
+ }
+}
+"#,
+ r#"
+struct FooResult(usize, u32, u8);
+
+fn foo(mut i: isize) -> FooResult {
+ if i < 0 {
+ return FooResult(0, 0, 0);
+ }
+
+ loop {
+ if i == 2 {
+ println!("foo");
+ break FooResult(1, 2, 3);
+ }
+ i += 1;
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn body_doesnt_wrap_identifier() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn foo() -> $0(u8, usize, u32) {
+ let tuple = (1, 2, 3);
+ tuple
+}
+"#,
+ r#"
+struct FooResult(u8, usize, u32);
+
+fn foo() -> FooResult {
+ let tuple = (1, 2, 3);
+ tuple
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn body_doesnt_wrap_other_exprs() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar(num: usize) -> (u8, usize, u32) {
+ (1, num, 3)
+}
+
+fn foo() -> $0(u8, usize, u32) {
+ bar(2)
+}
+"#,
+ r#"
+fn bar(num: usize) -> (u8, usize, u32) {
+ (1, num, 3)
+}
+
+struct FooResult(u8, usize, u32);
+
+fn foo() -> FooResult {
+ bar(2)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn cross_file_and_module() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ use foo::bar;
+
+ let (bar, baz) = bar::bar();
+ println!("{}", bar == baz);
+}
+
+//- /foo.rs
+pub mod bar {
+ pub fn bar() -> $0(usize, usize) {
+ (1, 3)
+ }
+}
+"#,
+ r#"
+//- /main.rs
+use crate::foo::bar::BarResult;
+
+mod foo;
+
+fn main() {
+ use foo::bar;
+
+ let BarResult(bar, baz) = bar::bar();
+ println!("{}", bar == baz);
+}
+
+//- /foo.rs
+pub mod bar {
+ pub struct BarResult(pub usize, pub usize);
+
+ pub fn bar() -> BarResult {
+ BarResult(1, 3)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn does_not_replace_nested_usage() {
+ check_assist(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(usize, bool) {
+ (42, true)
+}
+
+fn main() {
+ let ((bar1, bar2), foo) = (bar(), 3);
+ println!("{bar1} {bar2} {foo}");
+}
+"#,
+ r#"
+struct BarResult(usize, bool);
+
+fn bar() -> BarResult {
+ BarResult(42, true)
+}
+
+fn main() {
+ let ((bar1, bar2), foo) = (bar(), 3);
+ println!("{bar1} {bar2} {foo}");
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn function_with_non_tuple_return_type() {
+ check_assist_not_applicable(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0usize {
+ 0
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn function_with_impl_type() {
+ check_assist_not_applicable(
+ convert_tuple_return_type_to_struct,
+ r#"
+fn bar() -> $0(impl Clone, usize) {
+ ("bar", 0)
+}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index 017853a4a..435d7c4a5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -216,7 +216,7 @@ fn edit_field_references(
edit.edit_file(file_id);
for r in refs {
if let Some(name_ref) = r.name.as_name_ref() {
- edit.replace(name_ref.syntax().text_range(), name.text());
+ edit.replace(ctx.sema.original_range(name_ref.syntax()).range, name.text());
}
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
index f30ca2552..65b497e83 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -3,10 +3,12 @@ use ide_db::{
defs::Definition,
search::{FileReference, SearchScope, UsageSearchResult},
};
+use itertools::Itertools;
use syntax::{
- ast::{self, AstNode, FieldExpr, HasName, IdentPat, MethodCallExpr},
- TextRange,
+ ast::{self, make, AstNode, FieldExpr, HasName, IdentPat, MethodCallExpr},
+ ted, T,
};
+use text_edit::TextRange;
use crate::assist_context::{AssistContext, Assists, SourceChangeBuilder};
@@ -61,27 +63,36 @@ pub(crate) fn destructure_tuple_binding_impl(
acc.add(
AssistId("destructure_tuple_binding_in_sub_pattern", AssistKind::RefactorRewrite),
"Destructure tuple in sub-pattern",
- data.range,
- |builder| {
- edit_tuple_assignment(ctx, builder, &data, true);
- edit_tuple_usages(&data, builder, ctx, true);
- },
+ data.ident_pat.syntax().text_range(),
+ |edit| destructure_tuple_edit_impl(ctx, edit, &data, true),
);
}
acc.add(
AssistId("destructure_tuple_binding", AssistKind::RefactorRewrite),
if with_sub_pattern { "Destructure tuple in place" } else { "Destructure tuple" },
- data.range,
- |builder| {
- edit_tuple_assignment(ctx, builder, &data, false);
- edit_tuple_usages(&data, builder, ctx, false);
- },
+ data.ident_pat.syntax().text_range(),
+ |edit| destructure_tuple_edit_impl(ctx, edit, &data, false),
);
Some(())
}
+fn destructure_tuple_edit_impl(
+ ctx: &AssistContext<'_>,
+ edit: &mut SourceChangeBuilder,
+ data: &TupleData,
+ in_sub_pattern: bool,
+) {
+ let assignment_edit = edit_tuple_assignment(ctx, edit, &data, in_sub_pattern);
+ let current_file_usages_edit = edit_tuple_usages(&data, edit, ctx, in_sub_pattern);
+
+ assignment_edit.apply();
+ if let Some(usages_edit) = current_file_usages_edit {
+ usages_edit.into_iter().for_each(|usage_edit| usage_edit.apply(edit))
+ }
+}
+
fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleData> {
if ident_pat.at_token().is_some() {
// Cannot destructure pattern with sub-pattern:
@@ -109,7 +120,6 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
}
let name = ident_pat.name()?.to_string();
- let range = ident_pat.syntax().text_range();
let usages = ctx.sema.to_def(&ident_pat).map(|def| {
Definition::Local(def)
@@ -122,7 +132,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
.map(|i| generate_name(ctx, i, &name, &ident_pat, &usages))
.collect::<Vec<_>>();
- Some(TupleData { ident_pat, range, ref_type, field_names, usages })
+ Some(TupleData { ident_pat, ref_type, field_names, usages })
}
fn generate_name(
@@ -142,72 +152,100 @@ enum RefType {
}
struct TupleData {
ident_pat: IdentPat,
- // name: String,
- range: TextRange,
ref_type: Option<RefType>,
field_names: Vec<String>,
- // field_types: Vec<Type>,
usages: Option<UsageSearchResult>,
}
fn edit_tuple_assignment(
ctx: &AssistContext<'_>,
- builder: &mut SourceChangeBuilder,
+ edit: &mut SourceChangeBuilder,
data: &TupleData,
in_sub_pattern: bool,
-) {
+) -> AssignmentEdit {
+ let ident_pat = edit.make_mut(data.ident_pat.clone());
+
let tuple_pat = {
let original = &data.ident_pat;
let is_ref = original.ref_token().is_some();
let is_mut = original.mut_token().is_some();
- let fields = data.field_names.iter().map(|name| {
- ast::Pat::from(ast::make::ident_pat(is_ref, is_mut, ast::make::name(name)))
- });
- ast::make::tuple_pat(fields)
+ let fields = data
+ .field_names
+ .iter()
+ .map(|name| ast::Pat::from(make::ident_pat(is_ref, is_mut, make::name(name))));
+ make::tuple_pat(fields).clone_for_update()
};
- let add_cursor = |text: &str| {
- // place cursor on first tuple item
- let first_tuple = &data.field_names[0];
- text.replacen(first_tuple, &format!("$0{first_tuple}"), 1)
- };
+ if let Some(cap) = ctx.config.snippet_cap {
+ // place cursor on first tuple name
+ if let Some(ast::Pat::IdentPat(first_pat)) = tuple_pat.fields().next() {
+ edit.add_tabstop_before(
+ cap,
+ first_pat.name().expect("first ident pattern should have a name"),
+ )
+ }
+ }
- // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
- if in_sub_pattern {
- let text = format!(" @ {tuple_pat}");
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snip = add_cursor(&text);
- builder.insert_snippet(cap, data.range.end(), snip);
- }
- None => builder.insert(data.range.end(), text),
- };
- } else {
- let text = tuple_pat.to_string();
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snip = add_cursor(&text);
- builder.replace_snippet(cap, data.range, snip);
- }
- None => builder.replace(data.range, text),
- };
+ AssignmentEdit { ident_pat, tuple_pat, in_sub_pattern }
+}
+struct AssignmentEdit {
+ ident_pat: ast::IdentPat,
+ tuple_pat: ast::TuplePat,
+ in_sub_pattern: bool,
+}
+
+impl AssignmentEdit {
+ fn apply(self) {
+ // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)`
+ if self.in_sub_pattern {
+ self.ident_pat.set_pat(Some(self.tuple_pat.into()))
+ } else {
+ ted::replace(self.ident_pat.syntax(), self.tuple_pat.syntax())
+ }
}
}
fn edit_tuple_usages(
data: &TupleData,
- builder: &mut SourceChangeBuilder,
+ edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
in_sub_pattern: bool,
-) {
+) -> Option<Vec<EditTupleUsage>> {
+ let mut current_file_usages = None;
+
if let Some(usages) = data.usages.as_ref() {
- for (file_id, refs) in usages.iter() {
- builder.edit_file(*file_id);
+ // We need to collect edits first before actually applying them
+ // as mapping nodes to their mutable node versions requires an
+ // unmodified syntax tree.
+ //
+ // We also defer editing usages in the current file first since
+ // tree mutation in the same file breaks when `builder.edit_file`
+ // is called
+
+ if let Some((_, refs)) = usages.iter().find(|(file_id, _)| **file_id == ctx.file_id()) {
+ current_file_usages = Some(
+ refs.iter()
+ .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
+ .collect_vec(),
+ );
+ }
- for r in refs {
- edit_tuple_usage(ctx, builder, r, data, in_sub_pattern);
+ for (file_id, refs) in usages.iter() {
+ if *file_id == ctx.file_id() {
+ continue;
}
+
+ edit.edit_file(*file_id);
+
+ let tuple_edits = refs
+ .iter()
+ .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern))
+ .collect_vec();
+
+ tuple_edits.into_iter().for_each(|tuple_edit| tuple_edit.apply(edit))
}
}
+
+ current_file_usages
}
fn edit_tuple_usage(
ctx: &AssistContext<'_>,
@@ -215,25 +253,14 @@ fn edit_tuple_usage(
usage: &FileReference,
data: &TupleData,
in_sub_pattern: bool,
-) {
+) -> Option<EditTupleUsage> {
match detect_tuple_index(usage, data) {
- Some(index) => edit_tuple_field_usage(ctx, builder, data, index),
- None => {
- if in_sub_pattern {
- cov_mark::hit!(destructure_tuple_call_with_subpattern);
- return;
- }
-
- // no index access -> make invalid -> requires handling by user
- // -> put usage in block comment
- //
- // Note: For macro invocations this might result in still valid code:
- // When a macro accepts the tuple as argument, as well as no arguments at all,
- // uncommenting the tuple still leaves the macro call working (see `tests::in_macro_call::empty_macro`).
- // But this is an unlikely case. Usually the resulting macro call will become erroneous.
- builder.insert(usage.range.start(), "/*");
- builder.insert(usage.range.end(), "*/");
+ Some(index) => Some(edit_tuple_field_usage(ctx, builder, data, index)),
+ None if in_sub_pattern => {
+ cov_mark::hit!(destructure_tuple_call_with_subpattern);
+ return None;
}
+ None => Some(EditTupleUsage::NoIndex(usage.range)),
}
}
@@ -242,19 +269,47 @@ fn edit_tuple_field_usage(
builder: &mut SourceChangeBuilder,
data: &TupleData,
index: TupleIndex,
-) {
+) -> EditTupleUsage {
let field_name = &data.field_names[index.index];
+ let field_name = make::expr_path(make::ext::ident_path(field_name));
if data.ref_type.is_some() {
- let ref_data = handle_ref_field_usage(ctx, &index.field_expr);
- builder.replace(ref_data.range, ref_data.format(field_name));
+ let (replace_expr, ref_data) = handle_ref_field_usage(ctx, &index.field_expr);
+ let replace_expr = builder.make_mut(replace_expr);
+ EditTupleUsage::ReplaceExpr(replace_expr, ref_data.wrap_expr(field_name))
} else {
- builder.replace(index.range, field_name);
+ let field_expr = builder.make_mut(index.field_expr);
+ EditTupleUsage::ReplaceExpr(field_expr.into(), field_name)
+ }
+}
+enum EditTupleUsage {
+ /// no index access -> make invalid -> requires handling by user
+ /// -> put usage in block comment
+ ///
+ /// Note: For macro invocations this might result in still valid code:
+ /// When a macro accepts the tuple as argument, as well as no arguments at all,
+ /// uncommenting the tuple still leaves the macro call working (see `tests::in_macro_call::empty_macro`).
+ /// But this is an unlikely case. Usually the resulting macro call will become erroneous.
+ NoIndex(TextRange),
+ ReplaceExpr(ast::Expr, ast::Expr),
+}
+
+impl EditTupleUsage {
+ fn apply(self, edit: &mut SourceChangeBuilder) {
+ match self {
+ EditTupleUsage::NoIndex(range) => {
+ edit.insert(range.start(), "/*");
+ edit.insert(range.end(), "*/");
+ }
+ EditTupleUsage::ReplaceExpr(target_expr, replace_with) => {
+ ted::replace(target_expr.syntax(), replace_with.clone_for_update().syntax())
+ }
+ }
}
}
+
struct TupleIndex {
index: usize,
- range: TextRange,
field_expr: FieldExpr,
}
fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIndex> {
@@ -296,7 +351,7 @@ fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIn
return None;
}
- Some(TupleIndex { index: idx, range: field_expr.syntax().text_range(), field_expr })
+ Some(TupleIndex { index: idx, field_expr })
} else {
// tuple index out of range
None
@@ -307,32 +362,34 @@ fn detect_tuple_index(usage: &FileReference, data: &TupleData) -> Option<TupleIn
}
struct RefData {
- range: TextRange,
needs_deref: bool,
needs_parentheses: bool,
}
impl RefData {
- fn format(&self, field_name: &str) -> String {
- match (self.needs_deref, self.needs_parentheses) {
- (true, true) => format!("(*{field_name})"),
- (true, false) => format!("*{field_name}"),
- (false, true) => format!("({field_name})"),
- (false, false) => field_name.to_string(),
+ fn wrap_expr(&self, mut expr: ast::Expr) -> ast::Expr {
+ if self.needs_deref {
+ expr = make::expr_prefix(T![*], expr);
}
+
+ if self.needs_parentheses {
+ expr = make::expr_paren(expr);
+ }
+
+ return expr;
}
}
-fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> RefData {
+fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> (ast::Expr, RefData) {
let s = field_expr.syntax();
- let mut ref_data =
- RefData { range: s.text_range(), needs_deref: true, needs_parentheses: true };
+ let mut ref_data = RefData { needs_deref: true, needs_parentheses: true };
+ let mut target_node = field_expr.clone().into();
let parent = match s.parent().map(ast::Expr::cast) {
Some(Some(parent)) => parent,
Some(None) => {
ref_data.needs_parentheses = false;
- return ref_data;
+ return (target_node, ref_data);
}
- None => return ref_data,
+ None => return (target_node, ref_data),
};
match parent {
@@ -342,7 +399,7 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
// there might be a ref outside: `&(t.0)` -> can be removed
if let Some(it) = it.syntax().parent().and_then(ast::RefExpr::cast) {
ref_data.needs_deref = false;
- ref_data.range = it.syntax().text_range();
+ target_node = it.into();
}
}
ast::Expr::RefExpr(it) => {
@@ -351,8 +408,8 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
ref_data.needs_parentheses = false;
// might be surrounded by parens -> can be removed too
match it.syntax().parent().and_then(ast::ParenExpr::cast) {
- Some(parent) => ref_data.range = parent.syntax().text_range(),
- None => ref_data.range = it.syntax().text_range(),
+ Some(parent) => target_node = parent.into(),
+ None => target_node = it.into(),
};
}
// higher precedence than deref `*`
@@ -414,7 +471,7 @@ fn handle_ref_field_usage(ctx: &AssistContext<'_>, field_expr: &FieldExpr) -> Re
}
};
- ref_data
+ (target_node, ref_data)
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
index ddc8a50ed..c859e9852 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
@@ -33,9 +33,7 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// Only allow comments which are alone on their line
if let Some(prev) = comment.syntax().prev_token() {
- if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
- return None;
- }
+ Whitespace::cast(prev).filter(|w| w.text().contains('\n'))?;
}
let indentation = IndentLevel::from_token(comment.syntax()).to_string();
@@ -50,7 +48,7 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
(
TextRange::new(
comments[0].syntax().text_range().start(),
- comments.last().unwrap().syntax().text_range().end(),
+ comments.last()?.syntax().text_range().end(),
),
Either::Right(comments),
)
@@ -71,9 +69,11 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
.map(|l| l.strip_prefix(&indentation).unwrap_or(l))
.join("\n")
}
- Either::Right(comments) => {
- comments.into_iter().map(|c| line_comment_text(IndentLevel(0), c)).join("\n")
- }
+ Either::Right(comments) => comments
+ .into_iter()
+ .map(|cm| line_comment_text(IndentLevel(0), cm))
+ .collect::<Vec<_>>()
+ .join("\n"),
};
let hashes = "#".repeat(required_hashes(&text));
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
index 31a1ff496..9d72d3af0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
@@ -1,4 +1,5 @@
use crate::{AssistContext, Assists};
+use hir::DescendPreference;
use ide_db::{
assists::{AssistId, AssistKind},
syntax_helpers::{
@@ -35,7 +36,8 @@ pub(crate) fn extract_expressions_from_format_string(
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
let expanded_t = ast::String::cast(
- ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()),
+ ctx.sema
+ .descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()),
)?;
if !is_format_string(&expanded_t) {
return None;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index de591cfde..347a3e9ba 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -3,8 +3,8 @@ use std::iter;
use ast::make;
use either::Either;
use hir::{
- HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics,
- TypeInfo, TypeParam,
+ DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef,
+ PathResolution, Semantics, TypeInfo, TypeParam,
};
use ide_db::{
defs::{Definition, NameRefClass},
@@ -147,7 +147,12 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
_ => format_function(ctx, module, &fun, old_indent, new_indent),
};
- if fn_def.contains("ControlFlow") {
+ // There are external control flows
+ if fun
+ .control_flow
+ .kind
+ .is_some_and(|kind| matches!(kind, FlowKind::Break(_, _) | FlowKind::Continue(_)))
+ {
let scope = match scope {
ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
@@ -163,6 +168,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
ModuleDef::from(control_flow_enum),
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
);
if let Some(mod_path) = mod_path {
@@ -750,7 +756,7 @@ impl FunctionBody {
.descendants_with_tokens()
.filter_map(SyntaxElement::into_token)
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
- .flat_map(|t| sema.descend_into_macros(t, 0.into()))
+ .flat_map(|t| sema.descend_into_macros(DescendPreference::None, t))
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
}
}
@@ -4970,6 +4976,27 @@ fn $0fun_name(arg: &mut Foo) {
"#,
);
}
+ #[test]
+ fn does_not_import_control_flow() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn func() {
+ $0let cf = "I'm ControlFlow";$0
+}
+"#,
+ r#"
+fn func() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ let cf = "I'm ControlFlow";
+}
+"#,
+ );
+ }
#[test]
fn extract_function_copies_comment_at_start() {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
index 6839c5820..4b9fedc7e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -3,7 +3,7 @@ use std::{
iter,
};
-use hir::{HasSource, ModuleSource};
+use hir::{HasSource, HirFileIdExt, ModuleSource};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
index e4f64ccc7..37db27a8f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -384,6 +384,7 @@ fn process_references(
*enum_module_def,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
);
if let Some(mut mod_path) = mod_path {
mod_path.pop_segment();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
index 014c23197..e7c884dcb 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
@@ -29,22 +29,31 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
// }
// ```
pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- if ctx.has_empty_selection() {
- return None;
- }
-
- let node = match ctx.covering_element() {
- NodeOrToken::Node(it) => it,
- NodeOrToken::Token(it) if it.kind() == COMMENT => {
- cov_mark::hit!(extract_var_in_comment_is_not_applicable);
+ let node = if ctx.has_empty_selection() {
+ if let Some(expr_stmt) = ctx.find_node_at_offset::<ast::ExprStmt>() {
+ expr_stmt.syntax().clone()
+ } else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() {
+ expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone()
+ } else {
return None;
}
- NodeOrToken::Token(it) => it.parent()?,
+ } else {
+ match ctx.covering_element() {
+ NodeOrToken::Node(it) => it,
+ NodeOrToken::Token(it) if it.kind() == COMMENT => {
+ cov_mark::hit!(extract_var_in_comment_is_not_applicable);
+ return None;
+ }
+ NodeOrToken::Token(it) => it.parent()?,
+ }
};
+
let node = node.ancestors().take_while(|anc| anc.text_range() == node.text_range()).last()?;
+ let range = node.text_range();
+
let to_extract = node
.descendants()
- .take_while(|it| ctx.selection_trimmed().contains_range(it.text_range()))
+ .take_while(|it| range.contains_range(it.text_range()))
.find_map(valid_target_expr)?;
let ty = ctx.sema.type_of_expr(&to_extract).map(TypeInfo::adjusted);
@@ -236,6 +245,138 @@ mod tests {
use super::*;
#[test]
+ fn test_extract_var_simple_without_select() {
+ check_assist(
+ extract_variable,
+ r#"
+fn main() -> i32 {
+ if true {
+ 1
+ } else {
+ 2
+ }$0
+}
+"#,
+ r#"
+fn main() -> i32 {
+ let $0var_name = if true {
+ 1
+ } else {
+ 2
+ };
+ var_name
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn foo() -> i32 { 1 }
+fn main() {
+ foo();$0
+}
+"#,
+ r#"
+fn foo() -> i32 { 1 }
+fn main() {
+ let $0foo = foo();
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ let a = Some(2);
+ a.is_some();$0
+}
+"#,
+ r#"
+fn main() {
+ let a = Some(2);
+ let $0is_some = a.is_some();
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ "hello"$0;
+}
+"#,
+ r#"
+fn main() {
+ let $0var_name = "hello";
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ 1 + 2$0;
+}
+"#,
+ r#"
+fn main() {
+ let $0var_name = 1 + 2;
+}
+"#,
+ );
+
+ check_assist(
+ extract_variable,
+ r#"
+fn main() {
+ match () {
+ () if true => 1,
+ _ => 2,
+ };$0
+}
+"#,
+ r#"
+fn main() {
+ let $0var_name = match () {
+ () if true => 1,
+ _ => 2,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_unit_expr_without_select_not_applicable() {
+ check_assist_not_applicable(
+ extract_variable,
+ r#"
+fn foo() {}
+fn main() {
+ foo()$0;
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ extract_variable,
+ r#"
+fn foo() {
+ let mut i = 3;
+ if i >= 0 {
+ i += 1;
+ } else {
+ i -= 1;
+ }$0
+}"#,
+ );
+ }
+
+ #[test]
fn test_extract_var_simple() {
check_assist(
extract_variable,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
index c9f272474..204e796fa 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -1,4 +1,6 @@
-use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef};
+use hir::{
+ db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef,
+};
use ide_db::base_db::FileId;
use syntax::{
ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
index 2ea6f58fa..8b46a23f9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -19,8 +19,19 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// ```
pub(crate) fn flip_binexpr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let expr = ctx.find_node_at_offset::<BinExpr>()?;
- let lhs = expr.lhs()?.syntax().clone();
let rhs = expr.rhs()?.syntax().clone();
+ let lhs = expr.lhs()?.syntax().clone();
+
+ let lhs = if let Some(bin_expr) = BinExpr::cast(lhs.clone()) {
+ if bin_expr.op_kind() == expr.op_kind() {
+ bin_expr.rhs()?.syntax().clone()
+ } else {
+ lhs
+ }
+ } else {
+ lhs
+ };
+
let op_range = expr.op_token()?.text_range();
// The assist should be applied only if the cursor is on the operator
let cursor_in_range = op_range.contains_range(ctx.selection_trimmed());
@@ -115,6 +126,24 @@ mod tests {
}
#[test]
+ fn flip_binexpr_works_for_lhs_arith() {
+ check_assist(
+ flip_binexpr,
+ r"fn f() { let res = 1 + (2 - 3) +$0 4 + 5; }",
+ r"fn f() { let res = 1 + 4 + (2 - 3) + 5; }",
+ )
+ }
+
+ #[test]
+ fn flip_binexpr_works_for_lhs_cmp() {
+ check_assist(
+ flip_binexpr,
+ r"fn f() { let res = 1 + (2 - 3) >$0 4 + 5; }",
+ r"fn f() { let res = 4 + 5 < 1 + (2 - 3); }",
+ )
+ }
+
+ #[test]
fn flip_binexpr_works_inside_match() {
check_assist(
flip_binexpr,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
index eccd7675f..a4e8e7388 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs
@@ -1,5 +1,5 @@
use crate::assist_context::{AssistContext, Assists};
-use hir::{HasVisibility, HirDisplay, Module};
+use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::{FileId, Upcast},
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index bbac0a26e..db1e0ceae 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,6 +1,7 @@
use std::collections::HashSet;
-use hir::{self, HasCrate, HasSource, HasVisibility};
+use hir::{self, HasCrate, HasVisibility};
+use ide_db::path_transform::PathTransform;
use syntax::{
ast::{
self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
@@ -105,7 +106,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
target,
|edit| {
// Create the function
- let method_source = match method.source(ctx.db()) {
+ let method_source = match ctx.sema.source(method) {
Some(source) => source.value,
None => return,
};
@@ -130,7 +131,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
vis,
fn_name,
type_params,
- None,
+ method_source.where_clause(),
params,
body,
ret_type,
@@ -183,6 +184,12 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let assoc_items = impl_def.get_or_create_assoc_item_list();
assoc_items.add_item(f.clone().into());
+ if let Some((target, source)) =
+ ctx.sema.scope(strukt.syntax()).zip(ctx.sema.scope(method_source.syntax()))
+ {
+ PathTransform::generic_transformation(&target, &source).apply(f.syntax());
+ }
+
if let Some(cap) = ctx.config.snippet_cap {
edit.add_tabstop_before(cap, f)
}
@@ -455,6 +462,209 @@ impl Person {
}
#[test]
+ fn test_preserve_where_clause() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Inner<T>(T);
+impl<T> Inner<T> {
+ fn get(&self) -> T
+ where
+ T: Copy,
+ T: PartialEq,
+ {
+ self.0
+ }
+}
+
+struct Struct<T> {
+ $0field: Inner<T>,
+}
+"#,
+ r#"
+struct Inner<T>(T);
+impl<T> Inner<T> {
+ fn get(&self) -> T
+ where
+ T: Copy,
+ T: PartialEq,
+ {
+ self.0
+ }
+}
+
+struct Struct<T> {
+ field: Inner<T>,
+}
+
+impl<T> Struct<T> {
+ $0fn get(&self) -> T where
+ T: Copy,
+ T: PartialEq, {
+ self.field.get()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fixes_basic_self_references() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Foo {
+ field: $0Bar,
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar(&self, other: Self) -> Self {
+ other
+ }
+}
+"#,
+ r#"
+struct Foo {
+ field: Bar,
+}
+
+impl Foo {
+ $0fn bar(&self, other: Bar) -> Bar {
+ self.field.bar(other)
+ }
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar(&self, other: Self) -> Self {
+ other
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fixes_nested_self_references() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Foo {
+ field: $0Bar,
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar(&mut self, a: (Self, [Self; 4]), b: Vec<Self>) {}
+}
+"#,
+ r#"
+struct Foo {
+ field: Bar,
+}
+
+impl Foo {
+ $0fn bar(&mut self, a: (Bar, [Bar; 4]), b: Vec<Bar>) {
+ self.field.bar(a, b)
+ }
+}
+
+struct Bar;
+
+impl Bar {
+ fn bar(&mut self, a: (Self, [Self; 4]), b: Vec<Self>) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fixes_self_references_with_lifetimes_and_generics() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+struct Foo<'a, T> {
+ $0field: Bar<'a, T>,
+}
+
+struct Bar<'a, T>(&'a T);
+
+impl<'a, T> Bar<'a, T> {
+ fn bar(self, mut b: Vec<&'a Self>) -> &'a Self {
+ b.pop().unwrap()
+ }
+}
+"#,
+ r#"
+struct Foo<'a, T> {
+ field: Bar<'a, T>,
+}
+
+impl<'a, T> Foo<'a, T> {
+ $0fn bar(self, mut b: Vec<&'a Bar<'_, T>>) -> &'a Bar<'_, T> {
+ self.field.bar(b)
+ }
+}
+
+struct Bar<'a, T>(&'a T);
+
+impl<'a, T> Bar<'a, T> {
+ fn bar(self, mut b: Vec<&'a Self>) -> &'a Self {
+ b.pop().unwrap()
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_fixes_self_references_across_macros() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+//- /bar.rs
+macro_rules! test_method {
+ () => {
+ pub fn test(self, b: Bar) -> Self {
+ self
+ }
+ };
+}
+
+pub struct Bar;
+
+impl Bar {
+ test_method!();
+}
+
+//- /main.rs
+mod bar;
+
+struct Foo {
+ $0bar: bar::Bar,
+}
+"#,
+ r#"
+mod bar;
+
+struct Foo {
+ bar: bar::Bar,
+}
+
+impl Foo {
+ $0pub fn test(self,b:bar::Bar) ->bar::Bar {
+ self.bar.test(b)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_generate_delegate_visibility() {
check_assist_not_applicable(
generate_delegate_methods,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
index 815453961..473c699b5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs
@@ -58,8 +58,12 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
- let trait_path =
- module.find_use_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.prefer_no_std)?;
+ let trait_path = module.find_use_path(
+ ctx.db(),
+ ModuleDef::Trait(trait_),
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )?;
let field_type = field.ty()?;
let field_name = field.name()?;
@@ -99,8 +103,12 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
let module = ctx.sema.to_def(&strukt)?.module(ctx.db());
let trait_ = deref_type_to_generate.to_trait(&ctx.sema, module.krate())?;
- let trait_path =
- module.find_use_path(ctx.db(), ModuleDef::Trait(trait_), ctx.config.prefer_no_std)?;
+ let trait_path = module.find_use_path(
+ ctx.db(),
+ ModuleDef::Trait(trait_),
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )?;
let field_type = field.ty()?;
let target = field.syntax().text_range();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
index 184f523e0..1a1e992e2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -1,4 +1,4 @@
-use hir::{HasSource, HirDisplay, InFile};
+use hir::{HasSource, HirDisplay, InRealFile};
use ide_db::assists::{AssistId, AssistKind};
use syntax::{
ast::{self, make, HasArgList},
@@ -114,14 +114,14 @@ fn add_variant_to_accumulator(
parent: PathParent,
) -> Option<()> {
let db = ctx.db();
- let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
+ let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
acc.add(
AssistId("generate_enum_variant", AssistKind::Generate),
"Generate variant",
target,
|builder| {
- builder.edit_file(file_id.original_file(db));
+ builder.edit_file(file_id);
let node = builder.make_mut(enum_node);
let variant = make_variant(ctx, name_ref, parent);
node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index 5b13e01b1..a113c817f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -1,5 +1,6 @@
use hir::{
- Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo,
+ Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type,
+ TypeInfo,
};
use ide_db::{
base_db::FileId,
@@ -404,7 +405,11 @@ impl FunctionBuilder {
leading_ws,
ret_type: fn_def.ret_type(),
// PANIC: we guarantee we always create a function body with a tail expr
- tail_expr: fn_def.body().unwrap().tail_expr().unwrap(),
+ tail_expr: fn_def
+ .body()
+ .expect("generated function should have a body")
+ .tail_expr()
+ .expect("function body should have a tail expression"),
should_focus_return_type: self.should_focus_return_type,
fn_def,
trailing_ws,
@@ -506,7 +511,7 @@ fn assoc_fn_target_info(
}
fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
- match &target {
+ match target {
GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
}
@@ -683,7 +688,7 @@ where
{
// This function should be only called with `Impl`, `Trait`, or `Function`, for which it's
// infallible to get source ast.
- let node = ctx.sema.source(def).unwrap().value;
+ let node = ctx.sema.source(def).expect("definition's source couldn't be found").value;
let generic_params = node.generic_param_list().into_iter().flat_map(|it| it.generic_params());
let where_clauses = node.where_clause().into_iter().flat_map(|it| it.predicates());
(generic_params, where_clauses)
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
new file mode 100644
index 000000000..cb8ef3956
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -0,0 +1,202 @@
+use ide_db::famous_defs::FamousDefs;
+use syntax::{
+ ast::{self, make},
+ ted, AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// FIXME: Generate proper `index_mut` method body refer to `index` method body may impossible due to the unpredicable case [#15581].
+// Here just leave the `index_mut` method body be same as `index` method body, user can modify it manually to meet their need.
+
+// Assist: generate_mut_trait_impl
+//
+// Adds a IndexMut impl from the `Index` trait.
+//
+// ```
+// # //- minicore: index
+// pub enum Axis { X = 0, Y = 1, Z = 2 }
+//
+// impl<T> core::ops::Index$0<Axis> for [T; 3] {
+// type Output = T;
+//
+// fn index(&self, index: Axis) -> &Self::Output {
+// &self[index as usize]
+// }
+// }
+// ```
+// ->
+// ```
+// pub enum Axis { X = 0, Y = 1, Z = 2 }
+//
+// $0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
+// fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+// &self[index as usize]
+// }
+// }
+//
+// impl<T> core::ops::Index<Axis> for [T; 3] {
+// type Output = T;
+//
+// fn index(&self, index: Axis) -> &Self::Output {
+// &self[index as usize]
+// }
+// }
+// ```
+pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
+
+ let trait_ = impl_def.trait_()?;
+ if let ast::Type::PathType(trait_path) = trait_.clone() {
+ let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
+ let scope = ctx.sema.scope(trait_path.syntax())?;
+ if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
+ return None;
+ }
+ }
+
+ // Index -> IndexMut
+ let index_trait = impl_def
+ .syntax()
+ .descendants()
+ .filter_map(ast::NameRef::cast)
+ .find(|it| it.text() == "Index")?;
+ ted::replace(
+ index_trait.syntax(),
+ make::path_segment(make::name_ref("IndexMut")).clone_for_update().syntax(),
+ );
+
+ // index -> index_mut
+ let trait_method_name = impl_def
+ .syntax()
+ .descendants()
+ .filter_map(ast::Name::cast)
+ .find(|it| it.text() == "index")?;
+ ted::replace(trait_method_name.syntax(), make::name("index_mut").clone_for_update().syntax());
+
+ let type_alias = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast)?;
+ ted::remove(type_alias.syntax());
+
+ // &self -> &mut self
+ let mut_self_param = make::mut_self_param();
+ let self_param: ast::SelfParam =
+ impl_def.syntax().descendants().find_map(ast::SelfParam::cast)?;
+ ted::replace(self_param.syntax(), mut_self_param.clone_for_update().syntax());
+
+ // &Self::Output -> &mut Self::Output
+ let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?;
+ ted::replace(
+ ret_type.syntax(),
+ make::ret_type(make::ty("&mut Self::Output")).clone_for_update().syntax(),
+ );
+
+ let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it {
+ ast::AssocItem::Fn(f) => Some(f),
+ _ => None,
+ })?;
+
+ let assoc_list = make::assoc_item_list().clone_for_update();
+ assoc_list.add_item(syntax::ast::AssocItem::Fn(fn_));
+ ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
+
+ let target = impl_def.syntax().text_range();
+ acc.add(
+ AssistId("generate_mut_trait_impl", AssistKind::Generate),
+ "Generate `IndexMut` impl from this `Index` trait",
+ target,
+ |edit| {
+ edit.insert(target.start(), format!("$0{}\n\n", impl_def.to_string()));
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_generate_mut_trait_impl() {
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: index
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+impl<T> core::ops::Index$0<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+"#,
+ r#"
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ &self[index as usize]
+ }
+}
+
+impl<T> core::ops::Index<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+"#,
+ );
+
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: index
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+}
+"#,
+ r#"
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+$0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+}
+
+impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_mut_trait_impl_not_applicable() {
+ check_assist_not_applicable(
+ generate_mut_trait_impl,
+ r#"
+pub trait Index<Idx: ?Sized> {}
+
+impl<T> Index$0<i32> for [T; 3] {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
index 824255e4f..7bfd59966 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs
@@ -67,6 +67,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)?;
let expr = use_trivial_constructor(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
index ffab58509..5b9cc5f66 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -8,7 +8,7 @@ use ide_db::{
defs::Definition,
imports::insert_use::remove_path_if_in_use_stmt,
path_transform::PathTransform,
- search::{FileReference, SearchScope},
+ search::{FileReference, FileReferenceNode, SearchScope},
source_change::SourceChangeBuilder,
syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
RootDatabase,
@@ -148,7 +148,7 @@ pub(super) fn split_refs_and_uses<T: ast::AstNode>(
) -> (Vec<T>, Vec<ast::Path>) {
iter.into_iter()
.filter_map(|file_ref| match file_ref.name {
- ast::NameLike::NameRef(name_ref) => Some(name_ref),
+ FileReferenceNode::NameRef(name_ref) => Some(name_ref),
_ => None,
})
.filter_map(|name_ref| match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
@@ -224,7 +224,6 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
syntax.text_range(),
|builder| {
let replacement = inline(&ctx.sema, file_id, function, &fn_body, &params, &call_info);
-
builder.replace_ast(
match call_info.node {
ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it),
@@ -347,7 +346,7 @@ fn inline(
match param.as_local(sema.db) {
Some(l) => usages_for_locals(l)
.map(|FileReference { name, range, .. }| match name {
- ast::NameLike::NameRef(_) => body
+ FileReferenceNode::NameRef(_) => body
.syntax()
.covering_element(range)
.ancestors()
@@ -363,16 +362,22 @@ fn inline(
.collect();
if function.self_param(sema.db).is_some() {
- let this = || make::name_ref("this").syntax().clone_for_update().first_token().unwrap();
+ let this = || {
+ make::name_ref("this")
+ .syntax()
+ .clone_for_update()
+ .first_token()
+ .expect("NameRef should have had a token.")
+ };
if let Some(self_local) = params[0].2.as_local(sema.db) {
usages_for_locals(self_local)
.filter_map(|FileReference { name, range, .. }| match name {
- ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
+ FileReferenceNode::NameRef(_) => Some(body.syntax().covering_element(range)),
_ => None,
})
- .for_each(|it| {
- ted::replace(it, &this());
- })
+ .for_each(|usage| {
+ ted::replace(usage, &this());
+ });
}
}
@@ -470,7 +475,9 @@ fn inline(
}
} else if let Some(stmt_list) = body.stmt_list() {
ted::insert_all(
- ted::Position::after(stmt_list.l_curly_token().unwrap()),
+ ted::Position::after(
+ stmt_list.l_curly_token().expect("L_CURLY for StatementList is missing."),
+ ),
let_stmts.into_iter().map(|stmt| stmt.syntax().clone().into()).collect(),
);
}
@@ -481,8 +488,12 @@ fn inline(
};
body.reindent_to(original_indentation);
+ let no_stmts = body.statements().next().is_none();
match body.tail_expr() {
- Some(expr) if !is_async_fn && body.statements().next().is_none() => expr,
+ Some(expr) if matches!(expr, ast::Expr::ClosureExpr(_)) && no_stmts => {
+ make::expr_paren(expr).clone_for_update()
+ }
+ Some(expr) if !is_async_fn && no_stmts => expr,
_ => match node
.syntax()
.parent()
@@ -1474,4 +1485,29 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn inline_call_closure_body() {
+ check_assist(
+ inline_call,
+ r#"
+fn f() -> impl Fn() -> i32 {
+ || 2
+}
+
+fn main() {
+ let _ = $0f()();
+}
+"#,
+ r#"
+fn f() -> impl Fn() -> i32 {
+ || 2
+}
+
+fn main() {
+ let _ = (|| 2)();
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
index e69d1a296..5d8ba43ec 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_local_variable.rs
@@ -2,7 +2,7 @@ use hir::{PathResolution, Semantics};
use ide_db::{
base_db::FileId,
defs::Definition,
- search::{FileReference, UsageSearchResult},
+ search::{FileReference, FileReferenceNode, UsageSearchResult},
RootDatabase,
};
use syntax::{
@@ -63,7 +63,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
let wrap_in_parens = references
.into_iter()
.filter_map(|FileReference { range, name, .. }| match name {
- ast::NameLike::NameRef(name) => Some((range, name)),
+ FileReferenceNode::NameRef(name) => Some((range, name)),
_ => None,
})
.map(|(range, name_ref)| {
@@ -96,8 +96,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
);
let parent = matches!(
usage_parent,
- ast::Expr::CallExpr(_)
- | ast::Expr::TupleExpr(_)
+ ast::Expr::TupleExpr(_)
| ast::Expr::ArrayExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::ForExpr(_)
@@ -952,4 +951,22 @@ fn f() {
"#,
);
}
+
+ #[test]
+ fn test_inline_closure() {
+ check_assist(
+ inline_local_variable,
+ r#"
+fn main() {
+ let $0f = || 2;
+ let _ = f();
+}
+"#,
+ r#"
+fn main() {
+ let _ = (|| 2)();
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
index 663df266b..965e4aa78 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/into_to_qualified_from.rs
@@ -52,9 +52,13 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
== FamousDefs(sema, scope.krate()).core_convert_Into()?
{
let type_call = sema.type_of_expr(&method_call.clone().into())?;
- let type_call_disp =
- type_call.adjusted().display_source_code(db, scope.module().into(), true).ok()?;
+ let adjusted_tc = type_call.adjusted();
+ if adjusted_tc.contains_unknown() {
+ return None;
+ }
+
+ let sc = adjusted_tc.display_source_code(db, scope.module().into(), true).ok()?;
acc.add(
AssistId("into_to_qualified_from", AssistKind::Generate),
"Convert `into` to fully qualified `from`",
@@ -62,7 +66,11 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
|edit| {
edit.replace(
method_call.syntax().text_range(),
- format!("{}::from({})", type_call_disp, receiver),
+ if sc.chars().all(|c| c.is_alphanumeric() || c == ':') {
+ format!("{}::from({})", sc, receiver)
+ } else {
+ format!("<{}>::from({})", sc, receiver)
+ },
);
},
);
@@ -202,4 +210,64 @@ fn main() -> () {
}"#,
)
}
+
+ #[test]
+ fn preceding_type_qualifier() {
+ check_assist(
+ into_to_qualified_from,
+ r#"
+//- minicore: from
+impl From<(i32,i32)> for [i32;2] {
+ fn from(value: (i32,i32)) -> Self {
+ [value.0, value.1]
+ }
+}
+
+fn tuple_to_array() -> [i32; 2] {
+ (0,1).in$0to()
+}"#,
+ r#"
+impl From<(i32,i32)> for [i32;2] {
+ fn from(value: (i32,i32)) -> Self {
+ [value.0, value.1]
+ }
+}
+
+fn tuple_to_array() -> [i32; 2] {
+ <[i32; 2]>::from((0,1))
+}"#,
+ )
+ }
+
+ #[test]
+ fn type_with_gens() {
+ check_assist(
+ into_to_qualified_from,
+ r#"
+//- minicore: from
+struct StructA<Gen>(Gen);
+
+impl From<i32> for StructA<i32> {
+ fn from(value: i32) -> Self {
+ StructA(value + 1)
+ }
+}
+
+fn main() -> () {
+ let a: StructA<i32> = 3.in$0to();
+}"#,
+ r#"
+struct StructA<Gen>(Gen);
+
+impl From<i32> for StructA<i32> {
+ fn from(value: i32) -> Self {
+ StructA(value + 1)
+ }
+}
+
+fn main() -> () {
+ let a: StructA<i32> = <StructA<i32>>::from(3);
+}"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
index 4bf974a56..ff65aac82 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs
@@ -48,6 +48,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) ->
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)?;
let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call);
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
index 239149dc4..fde46db30 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs
@@ -37,8 +37,11 @@ use crate::{
// ```
pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
- let mut proposed_imports =
- import_assets.search_for_relative_paths(&ctx.sema, ctx.config.prefer_no_std);
+ let mut proposed_imports = import_assets.search_for_relative_paths(
+ &ctx.sema,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ );
if proposed_imports.is_empty() {
return None;
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
index ffc32f804..0281b29cd 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -1,4 +1,4 @@
-use syntax::{ast, AstNode};
+use syntax::{ast, AstNode, SyntaxKind, T};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -39,7 +39,19 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
AssistId("remove_parentheses", AssistKind::Refactor),
"Remove redundant parentheses",
target,
- |builder| builder.replace_ast(parens.into(), expr),
+ |builder| {
+ let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token());
+ let need_to_add_ws = match prev_token {
+ Some(it) => {
+ let tokens = vec![T![&], T![!], T!['('], T!['['], T!['{']];
+ it.kind() != SyntaxKind::WHITESPACE && !tokens.contains(&it.kind())
+ }
+ None => false,
+ };
+ let expr = if need_to_add_ws { format!(" {}", expr) } else { expr.to_string() };
+
+ builder.replace(parens.syntax().text_range(), expr)
+ },
)
}
@@ -50,6 +62,15 @@ mod tests {
use super::*;
#[test]
+ fn remove_parens_space() {
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { match$0(true) {} }"#,
+ r#"fn f() { match true {} }"#,
+ );
+ }
+
+ #[test]
fn remove_parens_simple() {
check_assist(remove_parentheses, r#"fn f() { $0(2) + 2; }"#, r#"fn f() { 2 + 2; }"#);
check_assist(remove_parentheses, r#"fn f() { ($02) + 2; }"#, r#"fn f() { 2 + 2; }"#);
@@ -94,8 +115,8 @@ mod tests {
check_assist(remove_parentheses, r#"fn f() { f(($02 + 2)); }"#, r#"fn f() { f(2 + 2); }"#);
check_assist(
remove_parentheses,
- r#"fn f() { (1<2)&&$0(3>4); }"#,
- r#"fn f() { (1<2)&&3>4; }"#,
+ r#"fn f() { (1<2) &&$0(3>4); }"#,
+ r#"fn f() { (1<2) && 3>4; }"#,
);
}
@@ -164,8 +185,8 @@ mod tests {
fn remove_parens_weird_places() {
check_assist(
remove_parentheses,
- r#"fn f() { match () { _=>$0(()) } }"#,
- r#"fn f() { match () { _=>() } }"#,
+ r#"fn f() { match () { _ =>$0(()) } }"#,
+ r#"fn f() { match () { _ => () } }"#,
);
check_assist(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
index 5fcab8c02..ee44064e7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -1,6 +1,6 @@
use std::collections::{hash_map::Entry, HashMap};
-use hir::{InFile, Module, ModuleSource};
+use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
use ide_db::{
base_db::FileRange,
defs::Definition,
@@ -167,7 +167,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<Sea
fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScope> {
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
- if let Some((file_id, call_source)) = file_id.original_call_node(db) {
+ if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) {
(file_id, Some(call_source.text_range()))
} else {
(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index ac45581b7..b54e4204e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -1,4 +1,4 @@
-use hir::{InFile, ModuleDef};
+use hir::{InFile, MacroFileIdExt, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{
@@ -43,12 +43,12 @@ pub(crate) fn replace_derive_with_manual_impl(
) -> Option<()> {
let attr = ctx.find_node_at_offset_with_descend::<ast::Attr>()?;
let path = attr.path()?;
- let hir_file = ctx.sema.hir_file_for(attr.syntax());
- if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) {
+ let macro_file = ctx.sema.hir_file_for(attr.syntax()).macro_file()?;
+ if !macro_file.is_derive_attr_pseudo_expansion(ctx.db()) {
return None;
}
- let InFile { file_id, value } = hir_file.call_node(ctx.db())?;
+ let InFile { file_id, value } = macro_file.call_node(ctx.db());
if file_id.is_macro() {
// FIXME: make this work in macro files
return None;
@@ -56,7 +56,7 @@ pub(crate) fn replace_derive_with_manual_impl(
// collect the derive paths from the #[derive] expansion
let current_derives = ctx
.sema
- .parse_or_expand(hir_file)
+ .parse_or_expand(macro_file.into())
.descendants()
.filter_map(ast::Attr::cast)
.filter_map(|attr| attr.path())
@@ -82,7 +82,12 @@ pub(crate) fn replace_derive_with_manual_impl(
})
.flat_map(|trait_| {
current_module
- .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), ctx.config.prefer_no_std)
+ .find_use_path(
+ ctx.sema.db,
+ hir::ModuleDef::Trait(trait_),
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
.as_ref()
.map(mod_path_to_ast)
.zip(Some(trait_))
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
new file mode 100644
index 000000000..b1daaea1e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -0,0 +1,172 @@
+use syntax::ast::{self, AstNode};
+
+use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: replace_is_some_with_if_let_some
+//
+// Replace `if x.is_some()` with `if let Some(_tmp) = x` or `if x.is_ok()` with `if let Ok(_tmp) = x`.
+//
+// ```
+// fn main() {
+// let x = Some(1);
+// if x.is_som$0e() {}
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let x = Some(1);
+// if let Some(${0:x}) = x {}
+// }
+// ```
+pub(crate) fn replace_is_method_with_if_let_method(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let if_expr = ctx.find_node_at_offset::<ast::IfExpr>()?;
+
+ let cond = if_expr.condition()?;
+ let call_expr = match cond {
+ ast::Expr::MethodCallExpr(call) => call,
+ _ => return None,
+ };
+
+ let name_ref = call_expr.name_ref()?;
+ match name_ref.text().as_str() {
+ "is_some" | "is_ok" => {
+ let receiver = call_expr.receiver()?;
+
+ let var_name = if let ast::Expr::PathExpr(path_expr) = receiver.clone() {
+ path_expr.path()?.to_string()
+ } else {
+ suggest_name::for_variable(&receiver, &ctx.sema)
+ };
+
+ let target = call_expr.syntax().text_range();
+
+ let (assist_id, message, text) = if name_ref.text() == "is_some" {
+ ("replace_is_some_with_if_let_some", "Replace `is_some` with `if let Some`", "Some")
+ } else {
+ ("replace_is_ok_with_if_let_ok", "Replace `is_ok` with `if let Ok`", "Ok")
+ };
+
+ acc.add(AssistId(assist_id, AssistKind::RefactorRewrite), message, target, |edit| {
+ let var_name = format!("${{0:{}}}", var_name);
+ let replacement = format!("let {}({}) = {}", text, var_name, receiver);
+ edit.replace(target, replacement);
+ })
+ }
+ _ => return None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::replace_is_method_with_if_let_method;
+
+ #[test]
+ fn replace_is_some_with_if_let_some_works() {
+ check_assist(
+ replace_is_method_with_if_let_method,
+ r#"
+fn main() {
+ let x = Some(1);
+ if x.is_som$0e() {}
+}
+"#,
+ r#"
+fn main() {
+ let x = Some(1);
+ if let Some(${0:x}) = x {}
+}
+"#,
+ );
+
+ check_assist(
+ replace_is_method_with_if_let_method,
+ r#"
+fn test() -> Option<i32> {
+ Some(1)
+}
+fn main() {
+ if test().is_som$0e() {}
+}
+"#,
+ r#"
+fn test() -> Option<i32> {
+ Some(1)
+}
+fn main() {
+ if let Some(${0:test}) = test() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_is_some_with_if_let_some_not_applicable() {
+ check_assist_not_applicable(
+ replace_is_method_with_if_let_method,
+ r#"
+fn main() {
+ let x = Some(1);
+ if x.is_non$0e() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_is_ok_with_if_let_ok_works() {
+ check_assist(
+ replace_is_method_with_if_let_method,
+ r#"
+fn main() {
+ let x = Ok(1);
+ if x.is_o$0k() {}
+}
+"#,
+ r#"
+fn main() {
+ let x = Ok(1);
+ if let Ok(${0:x}) = x {}
+}
+"#,
+ );
+
+ check_assist(
+ replace_is_method_with_if_let_method,
+ r#"
+fn test() -> Result<i32> {
+ Ok(1)
+}
+fn main() {
+ if test().is_o$0k() {}
+}
+"#,
+ r#"
+fn test() -> Result<i32> {
+ Ok(1)
+}
+fn main() {
+ if let Ok(${0:test}) = test() {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn replace_is_ok_with_if_let_ok_not_applicable() {
+ check_assist_not_applicable(
+ replace_is_method_with_if_let_method,
+ r#"
+fn main() {
+ let x = Ok(1);
+ if x.is_e$0rr() {}
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
index c7c0be4c7..e61ce4817 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
@@ -59,7 +59,10 @@ pub(crate) fn replace_named_generic_with_impl(
let mut path_types_to_replace = Vec::new();
for (_a, refs) in usage_refs.iter() {
for usage_ref in refs {
- let param_node = find_path_type(&ctx.sema, &type_param_name, &usage_ref.name)?;
+ let Some(name_like) = usage_ref.name.clone().into_name_like() else {
+ continue;
+ };
+ let param_node = find_path_type(&ctx.sema, &type_param_name, &name_like)?;
path_types_to_replace.push(param_node);
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
index dbbc56958..f03eb6118 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -68,6 +68,7 @@ pub(crate) fn replace_qualified_name_with_use(
module,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)
})
.flatten();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
index b7d57f02b..f864ee50c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_ignore.rs
@@ -55,7 +55,7 @@ pub(crate) fn toggle_ignore(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
}
fn has_ignore_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
- fn_def.attrs().find(|attr| attr.path().map(|it| it.syntax().text() == "ignore") == Some(true))
+ fn_def.attrs().find(|attr| attr.path().is_some_and(|it| it.syntax().text() == "ignore"))
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
index dac216b69..52df30d96 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_use.rs
@@ -36,29 +36,25 @@ pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
let old_parent_range = use_.syntax().parent()?.text_range();
let new_parent = use_.syntax().parent()?;
+ // If possible, explain what is going to be done.
+ let label = match tree.path().and_then(|path| path.first_segment()) {
+ Some(name) => format!("Unmerge use of `{name}`"),
+ None => "Unmerge use".into(),
+ };
+
let target = tree.syntax().text_range();
- acc.add(
- AssistId("unmerge_use", AssistKind::RefactorRewrite),
- "Unmerge use",
- target,
- |builder| {
- let new_use = make::use_(
- use_.visibility(),
- make::use_tree(
- path,
- tree.use_tree_list(),
- tree.rename(),
- tree.star_token().is_some(),
- ),
- )
- .clone_for_update();
-
- tree.remove();
- ted::insert(Position::after(use_.syntax()), new_use.syntax());
-
- builder.replace(old_parent_range, new_parent.to_string());
- },
- )
+ acc.add(AssistId("unmerge_use", AssistKind::RefactorRewrite), label, target, |builder| {
+ let new_use = make::use_(
+ use_.visibility(),
+ make::use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()),
+ )
+ .clone_for_update();
+
+ tree.remove();
+ ted::insert(Position::after(use_.syntax()), new_use.syntax());
+
+ builder.replace(old_parent_range, new_parent.to_string());
+ })
}
fn resolve_full_path(tree: &ast::UseTree) -> Option<ast::Path> {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
index 7f612c2a1..1cfa291a2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unnecessary_async.rs
@@ -2,11 +2,11 @@ use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,
defs::Definition,
- search::FileReference,
+ search::{FileReference, FileReferenceNode},
syntax_helpers::node_ext::full_path_of_name_ref,
};
use syntax::{
- ast::{self, NameLike, NameRef},
+ ast::{self, NameRef},
AstNode, SyntaxKind, TextRange,
};
@@ -76,7 +76,7 @@ pub(crate) fn unnecessary_async(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
for await_expr in find_all_references(ctx, &Definition::Function(fn_def))
// Keep only references that correspond NameRefs.
.filter_map(|(_, reference)| match reference.name {
- NameLike::NameRef(nameref) => Some(nameref),
+ FileReferenceNode::NameRef(nameref) => Some(nameref),
_ => None,
})
// Keep only references that correspond to await expressions
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
index e9d4e270c..0876246e9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unqualify_method_call.rs
@@ -1,3 +1,4 @@
+use ide_db::imports::insert_use::ImportScope;
use syntax::{
ast::{self, make, AstNode, HasArgList},
TextRange,
@@ -17,6 +18,8 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// ```
// ->
// ```
+// use std::ops::Add;
+//
// fn main() {
// 1.add(2);
// }
@@ -38,7 +41,7 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
let first_arg = args_iter.next()?;
let second_arg = args_iter.next();
- _ = path.qualifier()?;
+ let qualifier = path.qualifier()?;
let method_name = path.segment()?.name_ref()?;
let res = ctx.sema.resolve_path(&path)?;
@@ -76,10 +79,51 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
edit.insert(close, ")");
}
edit.replace(replace_comma, format!(".{method_name}("));
+ add_import(qualifier, ctx, edit);
},
)
}
+fn add_import(
+ qualifier: ast::Path,
+ ctx: &AssistContext<'_>,
+ edit: &mut ide_db::source_change::SourceChangeBuilder,
+) {
+ if let Some(path_segment) = qualifier.segment() {
+ // for `<i32 as std::ops::Add>`
+ let path_type = path_segment.qualifying_trait();
+ let import = match path_type {
+ Some(it) => {
+ if let Some(path) = it.path() {
+ path
+ } else {
+ return;
+ }
+ }
+ None => qualifier,
+ };
+
+ // in case for `<_>`
+ if import.coloncolon_token().is_none() {
+ return;
+ }
+
+ let scope = ide_db::imports::insert_use::ImportScope::find_insert_use_container(
+ import.syntax(),
+ &ctx.sema,
+ );
+
+ if let Some(scope) = scope {
+ let scope = match scope {
+ ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
+ };
+ ide_db::imports::insert_use::insert_use(&scope, import, &ctx.config.insert_use);
+ }
+ }
+}
+
fn needs_parens_as_receiver(expr: &ast::Expr) -> bool {
// Make `(expr).dummy()`
let dummy_call = make::expr_method_call(
@@ -127,6 +171,8 @@ fn f() { S.f(S); }"#,
//- minicore: add
fn f() { <u32 as core::ops::Add>::$0add(2, 2); }"#,
r#"
+use core::ops::Add;
+
fn f() { 2.add(2); }"#,
);
@@ -136,6 +182,8 @@ fn f() { 2.add(2); }"#,
//- minicore: add
fn f() { core::ops::Add::$0add(2, 2); }"#,
r#"
+use core::ops::Add;
+
fn f() { 2.add(2); }"#,
);
@@ -179,6 +227,8 @@ impl core::ops::Deref for S {
}
fn f() { core::ops::Deref::$0deref(&S); }"#,
r#"
+use core::ops::Deref;
+
struct S;
impl core::ops::Deref for S {
type Target = S;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
index f235b554e..03e6dfebe 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
@@ -123,10 +123,8 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
}
}
- Expr::ReturnExpr(ret_expr) => {
- if let Some(ret_expr_arg) = &ret_expr.expr() {
- for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
- }
+ Expr::ReturnExpr(_) => {
+ // all return expressions have already been handled by the walk loop
}
e => acc.push(e.clone()),
}
@@ -801,6 +799,24 @@ fn foo() -> i32 {
}
#[test]
+ fn wrap_return_in_tail_position() {
+ check_assist(
+ unwrap_result_return_type,
+ r#"
+//- minicore: result
+fn foo(num: i32) -> $0Result<i32, String> {
+ return Ok(num)
+}
+"#,
+ r#"
+fn foo(num: i32) -> i32 {
+ return num
+}
+"#,
+ );
+ }
+
+ #[test]
fn unwrap_result_return_type_simple_with_closure() {
check_assist(
unwrap_result_return_type,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
index 61e9bcdcc..b68ed00f7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
@@ -98,10 +98,8 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
}
}
- Expr::ReturnExpr(ret_expr) => {
- if let Some(ret_expr_arg) = &ret_expr.expr() {
- for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
- }
+ Expr::ReturnExpr(_) => {
+ // all return expressions have already been handled by the walk loop
}
e => acc.push(e.clone()),
}
@@ -733,6 +731,24 @@ fn foo() -> Result<i32, ${0:_}> {
}
#[test]
+ fn wrap_return_in_tail_position() {
+ check_assist(
+ wrap_return_type_in_result,
+ r#"
+//- minicore: result
+fn foo(num: i32) -> $0i32 {
+ return num
+}
+"#,
+ r#"
+fn foo(num: i32) -> Result<i32, ${0:_}> {
+ return Ok(num)
+}
+"#,
+ );
+ }
+
+ #[test]
fn wrap_return_type_in_result_simple_with_closure() {
check_assist(
wrap_return_type_in_result,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index 6f973ab53..1e4d1c94f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -58,7 +58,7 @@
//! See also this post:
//! <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#[allow(unused)]
macro_rules! eprintln {
@@ -115,6 +115,7 @@ mod handlers {
mod apply_demorgan;
mod auto_import;
mod bind_unused_param;
+ mod bool_to_enum;
mod change_visibility;
mod convert_bool_then;
mod convert_comment_block;
@@ -124,6 +125,7 @@ mod handlers {
mod convert_let_else_to_match;
mod convert_match_to_let_else;
mod convert_nested_function_to_closure;
+ mod convert_tuple_return_type_to_struct;
mod convert_tuple_struct_to_named_struct;
mod convert_named_struct_to_tuple_struct;
mod convert_to_guarded_return;
@@ -158,6 +160,7 @@ mod handlers {
mod generate_getter_or_setter;
mod generate_impl;
mod generate_is_empty_from_len;
+ mod generate_mut_trait_impl;
mod generate_new;
mod generate_delegate_methods;
mod generate_trait_from_impl;
@@ -193,6 +196,7 @@ mod handlers {
mod replace_try_expr_with_match;
mod replace_derive_with_manual_impl;
mod replace_if_let_with_match;
+ mod replace_is_method_with_if_let_method;
mod replace_method_eager_lazy;
mod replace_arith_op;
mod introduce_named_generic;
@@ -225,8 +229,10 @@ mod handlers {
add_return_type::add_return_type,
add_turbo_fish::add_turbo_fish,
apply_demorgan::apply_demorgan,
+ apply_demorgan::apply_demorgan_iterator,
auto_import::auto_import,
bind_unused_param::bind_unused_param,
+ bool_to_enum::bool_to_enum,
change_visibility::change_visibility,
convert_bool_then::convert_bool_then_to_if,
convert_bool_then::convert_if_to_bool_then,
@@ -237,6 +243,7 @@ mod handlers {
convert_iter_for_each_to_for::convert_for_loop_with_for_each,
convert_let_else_to_match::convert_let_else_to_match,
convert_match_to_let_else::convert_match_to_let_else,
+ convert_tuple_return_type_to_struct::convert_tuple_return_type_to_struct,
convert_named_struct_to_tuple_struct::convert_named_struct_to_tuple_struct,
convert_nested_function_to_closure::convert_nested_function_to_closure,
convert_to_guarded_return::convert_to_guarded_return,
@@ -268,6 +275,7 @@ mod handlers {
generate_function::generate_function,
generate_impl::generate_impl,
generate_impl::generate_trait_impl,
+ generate_mut_trait_impl::generate_mut_trait_impl,
generate_is_empty_from_len::generate_is_empty_from_len,
generate_new::generate_new,
generate_trait_from_impl::generate_trait_from_impl,
@@ -308,6 +316,7 @@ mod handlers {
replace_derive_with_manual_impl::replace_derive_with_manual_impl,
replace_if_let_with_match::replace_if_let_with_match,
replace_if_let_with_match::replace_match_with_if_let,
+ replace_is_method_with_if_let_method::replace_is_method_with_if_let_method,
replace_let_with_if_let::replace_let_with_if_let,
replace_method_eager_lazy::replace_with_eager_method,
replace_method_eager_lazy::replace_with_lazy_method,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index cc3e251a8..25b3d6d9d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -30,6 +30,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
skip_glob_imports: true,
},
prefer_no_std: false,
+ prefer_prelude: true,
assist_emit_must_use: false,
};
@@ -44,6 +45,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
skip_glob_imports: true,
},
prefer_no_std: false,
+ prefer_prelude: true,
assist_emit_must_use: false,
};
@@ -98,6 +100,11 @@ pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) {
check(assist, ra_fixture, ExpectedResult::NotApplicable, None);
}
+#[track_caller]
+pub(crate) fn check_assist_not_applicable_by_label(assist: Handler, ra_fixture: &str, label: &str) {
+ check(assist, ra_fixture, ExpectedResult::NotApplicable, Some(label));
+}
+
/// Check assist in unresolved state. Useful to check assists for lazy computation.
#[track_caller]
pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index dfaa53449..da5822bba 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -245,6 +245,30 @@ fn main() {
}
#[test]
+fn doctest_apply_demorgan_iterator() {
+ check_doc_test(
+ "apply_demorgan_iterator",
+ r#####"
+//- minicore: iterator
+fn main() {
+ let arr = [1, 2, 3];
+ if !arr.into_iter().$0any(|num| num == 4) {
+ println!("foo");
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ let arr = [1, 2, 3];
+ if arr.into_iter().all(|num| num != 4) {
+ println!("foo");
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_auto_import() {
check_doc_test(
"auto_import",
@@ -281,6 +305,34 @@ fn some_function(x: i32) {
}
#[test]
+fn doctest_bool_to_enum() {
+ check_doc_test(
+ "bool_to_enum",
+ r#####"
+fn main() {
+ let $0bool = true;
+
+ if bool {
+ println!("foo");
+ }
+}
+"#####,
+ r#####"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let bool = Bool::True;
+
+ if bool == Bool::True {
+ println!("foo");
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_change_visibility() {
check_doc_test(
"change_visibility",
@@ -559,6 +611,33 @@ fn main() {
}
#[test]
+fn doctest_convert_tuple_return_type_to_struct() {
+ check_doc_test(
+ "convert_tuple_return_type_to_struct",
+ r#####"
+fn bar() {
+ let (a, b, c) = foo();
+}
+
+fn foo() -> ($0u32, u32, u32) {
+ (1, 2, 3)
+}
+"#####,
+ r#####"
+fn bar() {
+ let FooResult(a, b, c) = foo();
+}
+
+struct FooResult(u32, u32, u32);
+
+fn foo() -> FooResult {
+ FooResult(1, 2, 3)
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_convert_tuple_struct_to_named_struct() {
check_doc_test(
"convert_tuple_struct_to_named_struct",
@@ -1460,6 +1539,42 @@ impl MyStruct {
}
#[test]
+fn doctest_generate_mut_trait_impl() {
+ check_doc_test(
+ "generate_mut_trait_impl",
+ r#####"
+//- minicore: index
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+impl<T> core::ops::Index$0<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+"#####,
+ r#####"
+pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ &self[index as usize]
+ }
+}
+
+impl<T> core::ops::Index<Axis> for [T; 3] {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ &self[index as usize]
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_new() {
check_doc_test(
"generate_new",
@@ -2480,6 +2595,25 @@ fn handle(action: Action) {
}
#[test]
+fn doctest_replace_is_some_with_if_let_some() {
+ check_doc_test(
+ "replace_is_some_with_if_let_some",
+ r#####"
+fn main() {
+ let x = Some(1);
+ if x.is_som$0e() {}
+}
+"#####,
+ r#####"
+fn main() {
+ let x = Some(1);
+ if let Some(${0:x}) = x {}
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_replace_let_with_if_let() {
check_doc_test(
"replace_let_with_if_let",
@@ -2850,6 +2984,8 @@ fn main() {
mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } }
"#####,
r#####"
+use std::ops::Add;
+
fn main() {
1.add(2);
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index a262570d9..f51e99a91 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -106,8 +106,18 @@ pub fn filter_assoc_items(
.iter()
.copied()
.filter(|assoc_item| {
- !(ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
- && assoc_item.attrs(sema.db).has_doc_hidden())
+ if ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
+ && assoc_item.attrs(sema.db).has_doc_hidden()
+ {
+ if let hir::AssocItem::Function(f) = assoc_item {
+ if !f.has_body(sema.db) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ return true;
})
// Note: This throws away items with no source.
.filter_map(|assoc_item| {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
index 092fb3036..60f90a41b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.5"
+itertools.workspace = true
once_cell = "1.17.0"
smallvec.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
index f60ac1501..7d38c638a 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs
@@ -626,6 +626,7 @@ fn enum_variants_with_paths(
ctx.db,
hir::ModuleDef::from(variant),
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
) {
// Variants with trivial paths are already added by the existing completion logic,
// so we should avoid adding these twice
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
index c5bbb7f8d..613a35dcb 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs
@@ -26,17 +26,17 @@ pub(crate) fn complete_dot(
item.add_to(acc, ctx.db);
}
- if let DotAccessKind::Method { .. } = dot_access.kind {
- cov_mark::hit!(test_no_struct_field_completion_for_method_call);
- } else {
- complete_fields(
- acc,
- ctx,
- receiver_ty,
- |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
- |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
- );
- }
+ let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
+
+ complete_fields(
+ acc,
+ ctx,
+ receiver_ty,
+ |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
+ |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
+ is_field_access,
+ );
+
complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
}
@@ -82,6 +82,7 @@ pub(crate) fn complete_undotted_self(
)
},
|acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
+ true,
);
complete_methods(ctx, &ty, |func| {
acc.add_method(
@@ -104,18 +105,23 @@ fn complete_fields(
receiver: &hir::Type,
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
+ is_field_access: bool,
) {
let mut seen_names = FxHashSet::default();
for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) {
- if seen_names.insert(field.name(ctx.db)) {
+ if seen_names.insert(field.name(ctx.db))
+ && (is_field_access || ty.is_fn() || ty.is_closure())
+ {
named_field(acc, field, ty);
}
}
for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() {
// Tuples are always the last type in a deref chain, so just check if the name is
// already seen without inserting into the hashset.
- if !seen_names.contains(&hir::Name::new_tuple_field(i)) {
+ if !seen_names.contains(&hir::Name::new_tuple_field(i))
+ && (is_field_access || ty.is_fn() || ty.is_closure())
+ {
// Tuple fields are always public (tuple struct fields are handled above).
tuple_index(acc, i, ty);
}
@@ -250,7 +256,6 @@ impl A {
#[test]
fn test_no_struct_field_completion_for_method_call() {
- cov_mark::check!(test_no_struct_field_completion_for_method_call);
check(
r#"
struct A { the_field: u32 }
@@ -935,9 +940,9 @@ impl Foo { fn foo(&self) { $0 } }"#,
expect![[r#"
fd self.field i32
lc self &Foo
- sp Self
- st Foo
- bt u32
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
me self.foo() fn(&self)
"#]],
);
@@ -949,9 +954,9 @@ impl Foo { fn foo(&mut self) { $0 } }"#,
expect![[r#"
fd self.0 i32
lc self &mut Foo
- sp Self
- st Foo
- bt u32
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
me self.foo() fn(&mut self)
"#]],
);
@@ -1095,4 +1100,140 @@ fn test(s: S<Unknown>) {
"#]],
);
}
+
+ #[test]
+ fn assoc_impl_1() {
+ check(
+ r#"
+//- minicore: deref
+fn main() {
+ let foo: Foo<&u8> = Foo::new(&42_u8);
+ foo.$0
+}
+
+trait Bar {
+ fn bar(&self);
+}
+
+impl Bar for u8 {
+ fn bar(&self) {}
+}
+
+struct Foo<F> {
+ foo: F,
+}
+
+impl<F> Foo<F> {
+ fn new(foo: F) -> Foo<F> {
+ Foo { foo }
+ }
+}
+
+impl<F: core::ops::Deref<Target = impl Bar>> Foo<F> {
+ fn foobar(&self) {
+ self.foo.deref().bar()
+ }
+}
+"#,
+ expect![[r#"
+ fd foo &u8
+ me foobar() fn(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn assoc_impl_2() {
+ check(
+ r#"
+//- minicore: deref
+fn main() {
+ let foo: Foo<&u8> = Foo::new(&42_u8);
+ foo.$0
+}
+
+trait Bar {
+ fn bar(&self);
+}
+
+struct Foo<F> {
+ foo: F,
+}
+
+impl<F> Foo<F> {
+ fn new(foo: F) -> Foo<F> {
+ Foo { foo }
+ }
+}
+
+impl<B: Bar, F: core::ops::Deref<Target = B>> Foo<F> {
+ fn foobar(&self) {
+ self.foo.deref().bar()
+ }
+}
+"#,
+ expect![[r#"
+ fd foo &u8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_function_field_completion() {
+ check(
+ r#"
+struct S { va_field: u32, fn_field: fn() }
+fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() }
+"#,
+ expect![[r#"
+ fd fn_field fn()
+ "#]],
+ );
+
+ check_edit(
+ "fn_field",
+ r#"
+struct S { va_field: u32, fn_field: fn() }
+fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() }
+"#,
+ r#"
+struct S { va_field: u32, fn_field: fn() }
+fn foo() { (S { va_field: 0, fn_field: || {} }.fn_field)() }
+"#,
+ );
+ }
+
+ #[test]
+ fn test_tuple_function_field_completion() {
+ check(
+ r#"
+struct B(u32, fn())
+fn foo() {
+ let b = B(0, || {});
+ b.$0()
+}
+"#,
+ expect![[r#"
+ fd 1 fn()
+ "#]],
+ );
+
+ check_edit(
+ "1",
+ r#"
+struct B(u32, fn())
+fn foo() {
+ let b = B(0, || {});
+ b.$0()
+}
+"#,
+ r#"
+struct B(u32, fn())
+fn foo() {
+ let b = B(0, || {});
+ (b.1)()
+}
+"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
index 9daa6984c..d3c817d4b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs
@@ -175,6 +175,7 @@ pub(crate) fn complete_expr_path(
ctx.db,
hir::ModuleDef::from(strukt),
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)
.filter(|it| it.len() > 1);
@@ -197,6 +198,7 @@ pub(crate) fn complete_expr_path(
ctx.db,
hir::ModuleDef::from(un),
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)
.filter(|it| it.len() > 1);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
index 39c1b7f7b..d74d3b264 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs
@@ -13,10 +13,9 @@ use crate::{
TypeLocation,
},
render::{render_resolution_with_import, render_resolution_with_import_pat, RenderContext},
+ Completions,
};
-use super::Completions;
-
// Feature: Completion With Autoimport
//
// When completing names in the current scope, proposes additional imports from other modules or crates,
@@ -258,7 +257,12 @@ fn import_on_the_fly(
let user_input_lowercased = potential_import_name.to_lowercase();
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std)
+ .search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
.into_iter()
.filter(ns_filter)
.filter(|import| {
@@ -300,7 +304,12 @@ fn import_on_the_fly_pat_(
let user_input_lowercased = potential_import_name.to_lowercase();
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std)
+ .search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
.into_iter()
.filter(ns_filter)
.filter(|import| {
@@ -337,7 +346,12 @@ fn import_on_the_fly_method(
let user_input_lowercased = potential_import_name.to_lowercase();
import_assets
- .search_for_imports(&ctx.sema, ctx.config.insert_use.prefix_kind, ctx.config.prefer_no_std)
+ .search_for_imports(
+ &ctx.sema,
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
+ )
.into_iter()
.filter(|import| {
!ctx.is_item_hidden(&import.item_to_import)
@@ -377,9 +391,12 @@ fn import_assets_for_path(
&ctx.sema,
ctx.token.parent()?,
)?;
- if fuzzy_name_length < 3 {
- cov_mark::hit!(flyimport_exact_on_short_path);
- assets_for_path.path_fuzzy_name_to_exact(false);
+ if fuzzy_name_length == 0 {
+ // nothing matches the empty string exactly, but we still compute assoc items in this case
+ assets_for_path.path_fuzzy_name_to_exact();
+ } else if fuzzy_name_length < 3 {
+ cov_mark::hit!(flyimport_prefix_on_short_path);
+ assets_for_path.path_fuzzy_name_to_prefix();
}
Some(assets_for_path)
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 42dfbfc7d..b0e4d8a5a 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -417,10 +417,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
@@ -526,10 +526,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
@@ -543,10 +543,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
@@ -562,10 +562,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
@@ -610,10 +610,10 @@ impl Test for T {
}
",
expect![[r#"
- sp Self
- st T
+ sp Self T
+ st T T
tt Test
- bt u32
+ bt u32 u32
"#]],
);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
index 1e0989405..5d138eea4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -2,7 +2,7 @@
use std::iter;
-use hir::{Module, ModuleSource};
+use hir::{HirFileIdExt, Module, ModuleSource};
use ide_db::{
base_db::{SourceDatabaseExt, VfsPath},
FxHashSet, RootDatabase, SymbolKind,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
index 945c3945b..46213deb0 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs
@@ -430,4 +430,29 @@ fn foo() {
"#,
);
}
+
+ #[test]
+ fn callable_field_struct_init() {
+ check_edit(
+ "field",
+ r#"
+struct S {
+ field: fn(),
+}
+
+fn main() {
+ S {fi$0
+}
+"#,
+ r#"
+struct S {
+ field: fn(),
+}
+
+fn main() {
+ S {field
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
index 7a60030e9..81107c1f4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs
@@ -71,9 +71,9 @@ pub(crate) fn complete_use_path(
if add_resolution {
let mut builder = Builder::from_resolution(ctx, path_ctx, name, def);
- builder.set_relevance(CompletionRelevance {
+ builder.with_relevance(|r| CompletionRelevance {
is_name_already_imported,
- ..Default::default()
+ ..r
});
acc.add(builder.build(ctx.db));
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
index 8f6a97e1e..ed5ddde8f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
@@ -14,10 +14,12 @@ pub struct CompletionConfig {
pub enable_imports_on_the_fly: bool,
pub enable_self_on_the_fly: bool,
pub enable_private_editable: bool,
+ pub full_function_signatures: bool,
pub callable: Option<CallableSnippets>,
pub snippet_cap: Option<SnippetCap>,
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,
+ pub prefer_prelude: bool,
pub snippets: Vec<Snippet>,
pub limit: Option<usize>,
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
index c45cc8d7b..de41a5bd7 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -1,6 +1,6 @@
//! See `CompletionItem` structure.
-use std::fmt;
+use std::{fmt, mem};
use hir::Mutability;
use ide_db::{
@@ -26,6 +26,10 @@ use crate::{
pub struct CompletionItem {
/// Label in the completion pop up which identifies completion.
pub label: SmolStr,
+ /// Additional label details in the completion pop up that are
+ /// displayed and aligned on the right side after the label.
+ pub label_detail: Option<SmolStr>,
+
/// Range of identifier that is being completed.
///
/// It should be used primarily for UI, but we also use this to convert
@@ -89,7 +93,7 @@ impl fmt::Debug for CompletionItem {
let mut s = f.debug_struct("CompletionItem");
s.field("label", &self.label).field("source_range", &self.source_range);
if self.text_edit.len() == 1 {
- let atom = &self.text_edit.iter().next().unwrap();
+ let atom = self.text_edit.iter().next().unwrap();
s.field("delete", &atom.delete);
s.field("insert", &atom.insert);
} else {
@@ -425,13 +429,14 @@ impl Builder {
pub(crate) fn build(self, db: &RootDatabase) -> CompletionItem {
let _p = profile::span("item::Builder::build");
- let mut label = self.label;
+ let label = self.label;
+ let mut label_detail = None;
let mut lookup = self.lookup.unwrap_or_else(|| label.clone());
let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
if !self.doc_aliases.is_empty() {
let doc_aliases = self.doc_aliases.iter().join(", ");
- label = SmolStr::from(format!("{label} (alias {doc_aliases})"));
+ label_detail.replace(SmolStr::from(format!(" (alias {doc_aliases})")));
let lookup_doc_aliases = self
.doc_aliases
.iter()
@@ -453,11 +458,16 @@ impl Builder {
}
if let [import_edit] = &*self.imports_to_add {
// snippets can have multiple imports, but normal completions only have up to one
- if let Some(original_path) = import_edit.original_path.as_ref() {
- label = SmolStr::from(format!("{label} (use {})", original_path.display(db)));
- }
+ label_detail.replace(SmolStr::from(format!(
+ "{} (use {})",
+ label_detail.as_deref().unwrap_or_default(),
+ import_edit.import_path.display(db)
+ )));
} else if let Some(trait_name) = self.trait_name {
- label = SmolStr::from(format!("{label} (as {trait_name})"));
+ label_detail.replace(SmolStr::from(format!(
+ "{} (as {trait_name})",
+ label_detail.as_deref().unwrap_or_default(),
+ )));
}
let text_edit = match self.text_edit {
@@ -479,6 +489,7 @@ impl Builder {
CompletionItem {
source_range: self.source_range,
label,
+ label_detail,
text_edit,
is_snippet: self.is_snippet,
detail: self.detail,
@@ -557,6 +568,13 @@ impl Builder {
self.relevance = relevance;
self
}
+ pub(crate) fn with_relevance(
+ &mut self,
+ relevance: impl FnOnce(CompletionRelevance) -> CompletionRelevance,
+ ) -> &mut Builder {
+ self.relevance = relevance(mem::take(&mut self.relevance));
+ self
+ }
pub(crate) fn trigger_call_info(&mut self) -> &mut Builder {
self.trigger_call_info = true;
self
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index 2eaa42040..37a2828e8 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -1,6 +1,6 @@
//! `completions` crate provides utilities for generating completions of user input.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod completions;
mod config;
@@ -169,6 +169,28 @@ pub fn completions(
return Some(completions.into());
}
+ // when the user types a bare `_` (that is it does not belong to an identifier)
+ // the user might just wanted to type a `_` for type inference or pattern discarding
+ // so try to suppress completions in those cases
+ if trigger_character == Some('_') && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE
+ {
+ if let CompletionAnalysis::NameRef(NameRefContext {
+ kind:
+ NameRefKind::Path(
+ path_ctx @ PathCompletionCtx {
+ kind: PathKind::Type { .. } | PathKind::Pat { .. },
+ ..
+ },
+ ),
+ ..
+ }) = analysis
+ {
+ if path_ctx.is_trivial_path() {
+ return None;
+ }
+ }
+ }
+
{
let acc = &mut completions;
@@ -241,6 +263,7 @@ pub fn resolve_completion_edits(
candidate,
config.insert_use.prefix_kind,
config.prefer_no_std,
+ config.prefer_prelude,
)
})
.find(|mod_path| mod_path.display(db).to_string() == full_import_path);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index dfe8fe7e2..2ea3f74d1 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -10,7 +10,7 @@ pub(crate) mod variant;
pub(crate) mod union_literal;
pub(crate) mod literal;
-use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
+use hir::{AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
use ide_db::{
documentation::{Documentation, HasDocs},
helpers::item_name,
@@ -18,9 +18,10 @@ use ide_db::{
RootDatabase, SnippetCap, SymbolKind,
};
use syntax::{AstNode, SmolStr, SyntaxKind, TextRange};
+use text_edit::TextEdit;
use crate::{
- context::{DotAccess, PathCompletionCtx, PathKind, PatternContext},
+ context::{DotAccess, DotAccessKind, PathCompletionCtx, PathKind, PatternContext},
item::{Builder, CompletionRelevanceTypeMatch},
render::{
function::render_fn,
@@ -147,7 +148,42 @@ pub(crate) fn render_field(
.set_documentation(field.docs(db))
.set_deprecated(is_deprecated)
.lookup_by(name);
- item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name));
+
+ let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
+ if !is_field_access || ty.is_fn() || ty.is_closure() {
+ let mut builder = TextEdit::builder();
+ // Using TextEdit, insert '(' before the struct name and ')' before the
+ // dot access; the field name follows, and function call parens are
+ // inserted if needed.
+
+ builder.replace(
+ ctx.source_range(),
+ field_with_receiver(db, receiver.as_ref(), &escaped_name).into(),
+ );
+
+ let expected_fn_type =
+ ctx.completion.expected_type.as_ref().is_some_and(|ty| ty.is_fn() || ty.is_closure());
+
+ if !expected_fn_type {
+ if let Some(receiver) = &dot_access.receiver {
+ if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) {
+ builder.insert(receiver.syntax().text_range().start(), "(".to_string());
+ builder.insert(ctx.source_range().end(), ")".to_string());
+
+ let is_parens_needed =
+ !matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
+
+ if is_parens_needed {
+ builder.insert(ctx.source_range().end(), "()".to_string());
+ }
+ }
+ }
+ }
+
+ item.text_edit(builder.finish());
+ } else {
+ item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name));
+ }
if let Some(receiver) = &dot_access.receiver {
if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) {
if let Some(ref_match) = compute_ref_match(ctx.completion, ty) {
@@ -304,6 +340,7 @@ fn render_resolution_path(
let cap = ctx.snippet_cap();
let db = completion.db;
let config = completion.config;
+ let requires_import = import_to_add.is_some();
let name = local_name.to_smol_str();
let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
@@ -334,8 +371,8 @@ fn render_resolution_path(
}
}
}
- if let ScopeDef::Local(local) = resolution {
- let ty = local.ty(db);
+
+ let mut set_item_relevance = |ty: Type| {
if !ty.is_unknown() {
item.detail(ty.display(db).to_string());
}
@@ -343,12 +380,38 @@ fn render_resolution_path(
item.set_relevance(CompletionRelevance {
type_match: compute_type_match(completion, &ty),
exact_name_match: compute_exact_name_match(completion, &name),
- is_local: true,
+ is_local: matches!(resolution, ScopeDef::Local(_)),
+ requires_import,
..CompletionRelevance::default()
});
path_ref_match(completion, path_ctx, &ty, &mut item);
};
+
+ match resolution {
+ ScopeDef::Local(local) => set_item_relevance(local.ty(db)),
+ ScopeDef::ModuleDef(ModuleDef::Adt(adt)) | ScopeDef::AdtSelfType(adt) => {
+ set_item_relevance(adt.ty(db))
+ }
+ // Filtered out above
+ ScopeDef::ModuleDef(
+ ModuleDef::Function(_) | ModuleDef::Variant(_) | ModuleDef::Macro(_),
+ ) => (),
+ ScopeDef::ModuleDef(ModuleDef::Const(konst)) => set_item_relevance(konst.ty(db)),
+ ScopeDef::ModuleDef(ModuleDef::Static(stat)) => set_item_relevance(stat.ty(db)),
+ ScopeDef::ModuleDef(ModuleDef::BuiltinType(bt)) => set_item_relevance(bt.ty(db)),
+ ScopeDef::ImplSelfType(imp) => set_item_relevance(imp.self_ty(db)),
+ ScopeDef::GenericParam(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown
+ | ScopeDef::ModuleDef(
+ ModuleDef::Trait(_)
+ | ModuleDef::TraitAlias(_)
+ | ModuleDef::Module(_)
+ | ModuleDef::TypeAlias(_),
+ ) => (),
+ };
+
item
}
@@ -435,6 +498,21 @@ fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> boo
}
}
+// FIXME: This checks types without possible coercions which some completions might want to do
+fn match_types(
+ ctx: &CompletionContext<'_>,
+ ty1: &hir::Type,
+ ty2: &hir::Type,
+) -> Option<CompletionRelevanceTypeMatch> {
+ if ty1 == ty2 {
+ Some(CompletionRelevanceTypeMatch::Exact)
+ } else if ty1.could_unify_with(ctx.db, ty2) {
+ Some(CompletionRelevanceTypeMatch::CouldUnify)
+ } else {
+ None
+ }
+}
+
fn compute_type_match(
ctx: &CompletionContext<'_>,
completion_ty: &hir::Type,
@@ -447,13 +525,7 @@ fn compute_type_match(
return None;
}
- if completion_ty == expected_type {
- Some(CompletionRelevanceTypeMatch::Exact)
- } else if expected_type.could_unify_with(ctx.db, completion_ty) {
- Some(CompletionRelevanceTypeMatch::CouldUnify)
- } else {
- None
- }
+ match_types(ctx, expected_type, completion_ty)
}
fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool {
@@ -557,7 +629,11 @@ mod tests {
let tag = it.kind.tag();
let relevance = display_relevance(it.relevance);
- items.push(format!("{tag} {} {relevance}\n", it.label));
+ items.push(format!(
+ "{tag} {}{} {relevance}\n",
+ it.label,
+ it.label_detail.clone().unwrap_or_default(),
+ ));
if let Some((label, _indel, relevance)) = it.ref_match() {
let relevance = display_relevance(relevance);
@@ -596,6 +672,330 @@ mod tests {
}
#[test]
+ fn set_struct_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub struct Struct {}
+}
+
+pub mod test_mod_a {
+ pub struct Struct {}
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Struct) { }
+
+fn main() {
+ test(Struct$0);
+}
+"#,
+ expect![[r#"
+ st dep::test_mod_b::Struct {…} [type_could_unify]
+ st Struct (use dep::test_mod_b::Struct) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ st Struct (use dep::test_mod_a::Struct) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_union_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub union Union {
+ a: i32,
+ b: i32
+ }
+}
+
+pub mod test_mod_a {
+ pub enum Union {
+ a: i32,
+ b: i32
+ }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Union) { }
+
+fn main() {
+ test(Union$0);
+}
+"#,
+ expect![[r#"
+ un Union (use dep::test_mod_b::Union) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ en Union (use dep::test_mod_a::Union) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_enum_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub enum Enum {
+ variant
+ }
+}
+
+pub mod test_mod_a {
+ pub enum Enum {
+ variant
+ }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Enum) { }
+
+fn main() {
+ test(Enum$0);
+}
+"#,
+ expect![[r#"
+ ev dep::test_mod_b::Enum::variant [type_could_unify]
+ en Enum (use dep::test_mod_b::Enum) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ en Enum (use dep::test_mod_a::Enum) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_enum_variant_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub enum Enum {
+ Variant
+ }
+}
+
+pub mod test_mod_a {
+ pub enum Enum {
+ Variant
+ }
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: dep::test_mod_b::Enum) { }
+
+fn main() {
+ test(Variant$0);
+}
+"#,
+ expect![[r#"
+ ev dep::test_mod_b::Enum::Variant [type_could_unify]
+ fn main() []
+ fn test(…) []
+ md dep []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_fn_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub fn function(j: isize) -> i32 {}
+}
+
+pub mod test_mod_a {
+ pub fn function(i: usize) -> i32 {}
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: fn(usize) -> i32) { }
+
+fn main() {
+ test(function$0);
+}
+"#,
+ expect![[r#"
+ fn main []
+ fn test []
+ md dep []
+ fn function (use dep::test_mod_a::function) [requires_import]
+ fn function (use dep::test_mod_b::function) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_const_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub const CONST: i32 = 1;
+}
+
+pub mod test_mod_a {
+ pub const CONST: i64 = 2;
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: i32) { }
+
+fn main() {
+ test(CONST$0);
+}
+"#,
+ expect![[r#"
+ ct CONST (use dep::test_mod_b::CONST) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ ct CONST (use dep::test_mod_a::CONST) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_static_type_completion_info() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+
+pub mod test_mod_b {
+ pub static STATIC: i32 = 5;
+}
+
+pub mod test_mod_a {
+ pub static STATIC: i64 = 5;
+}
+
+//- /main.rs crate:main deps:dep
+
+fn test(input: i32) { }
+
+fn main() {
+ test(STATIC$0);
+}
+"#,
+ expect![[r#"
+ sc STATIC (use dep::test_mod_b::STATIC) [type_could_unify+requires_import]
+ fn main() []
+ fn test(…) []
+ md dep []
+ sc STATIC (use dep::test_mod_a::STATIC) [requires_import]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_self_type_completion_info_with_params() {
+ check_relevance(
+ r#"
+//- /lib.rs crate:dep
+pub struct Struct;
+
+impl Struct {
+ pub fn Function(&self, input: i32) -> bool {
+ false
+ }
+}
+
+
+//- /main.rs crate:main deps:dep
+
+use dep::Struct;
+
+
+fn test(input: fn(&dep::Struct, i32) -> bool) { }
+
+fn main() {
+ test(Struct::Function$0);
+}
+
+"#,
+ expect![[r#"
+ me Function []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_self_type_completion_info() {
+ check_relevance(
+ r#"
+//- /main.rs crate:main
+
+struct Struct;
+
+impl Struct {
+fn test(&self) {
+ func(Self$0);
+ }
+}
+
+fn func(input: Struct) { }
+
+"#,
+ expect![[r#"
+ st Struct [type]
+ st Self [type]
+ sp Self [type]
+ st Struct [type]
+ lc self [local]
+ fn func(…) []
+ me self.test() []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn set_builtin_type_completion_info() {
+ check_relevance(
+ r#"
+//- /main.rs crate:main
+
+fn test(input: bool) { }
+ pub Input: bool = false;
+
+fn main() {
+ let input = false;
+ let inputbad = 3;
+ test(inp$0);
+}
+"#,
+ expect![[r#"
+ lc input [type+name+local]
+ lc inputbad [local]
+ fn main() []
+ fn test(…) []
+ "#]],
+ );
+ }
+
+ #[test]
fn enum_detail_includes_record_fields() {
check(
r#"
@@ -986,6 +1386,7 @@ use self::E::*;
kind: SymbolKind(
Enum,
),
+ detail: "E",
documentation: Documentation(
"enum docs",
),
@@ -1230,6 +1631,7 @@ fn go(world: &WorldSnapshot) { go(w$0) }
st WorldSnapshot {…} []
st &WorldSnapshot {…} [type]
st WorldSnapshot []
+ st &WorldSnapshot [type]
fn go(…) []
"#]],
);
@@ -1329,6 +1731,7 @@ fn main() {
st S []
st &mut S [type]
st S []
+ st &mut S [type]
fn foo(…) []
fn main() []
"#]],
@@ -1345,7 +1748,7 @@ fn main() {
expect![[r#"
lc s [type+name+local]
st S [type]
- st S []
+ st S [type]
fn foo(…) []
fn main() []
"#]],
@@ -1362,7 +1765,7 @@ fn main() {
expect![[r#"
lc ssss [type+local]
st S [type]
- st S []
+ st S [type]
fn foo(…) []
fn main() []
"#]],
@@ -1401,7 +1804,9 @@ fn main() {
st S []
st &S [type]
st S []
+ st &S [type]
st T []
+ st &T [type]
fn foo(…) []
fn main() []
md core []
@@ -1447,7 +1852,9 @@ fn main() {
st S []
st &mut S [type]
st S []
+ st &mut S [type]
st T []
+ st &mut T [type]
fn foo(…) []
fn main() []
md core []
@@ -1486,7 +1893,7 @@ fn bar(t: Foo) {}
expect![[r#"
ev Foo::A [type]
ev Foo::B [type]
- en Foo []
+ en Foo [type]
fn bar(…) []
fn foo() []
"#]],
@@ -1509,6 +1916,7 @@ fn bar(t: &Foo) {}
ev Foo::B []
ev &Foo::B [type]
en Foo []
+ en &Foo [type]
fn bar(…) []
fn foo() []
"#]],
@@ -1542,7 +1950,9 @@ fn main() {
st S []
st &S [type]
st S []
+ st &S [type]
st T []
+ st &T [type]
fn bar() []
fn &bar() [type]
fn foo(…) []
@@ -1596,7 +2006,7 @@ fn main() {
fn struct_field_method_ref() {
check_kinds(
r#"
-struct Foo { bar: u32 }
+struct Foo { bar: u32, qux: fn() }
impl Foo { fn baz(&self) -> u32 { 0 } }
fn foo(f: Foo) { let _: &u32 = f.b$0 }
@@ -1606,24 +2016,44 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
[
CompletionItem {
label: "baz()",
- source_range: 98..99,
- delete: 98..99,
+ source_range: 109..110,
+ delete: 109..110,
insert: "baz()$0",
kind: Method,
lookup: "baz",
detail: "fn(&self) -> u32",
- ref_match: "&@96",
+ ref_match: "&@107",
},
CompletionItem {
label: "bar",
- source_range: 98..99,
- delete: 98..99,
+ source_range: 109..110,
+ delete: 109..110,
insert: "bar",
kind: SymbolKind(
Field,
),
detail: "u32",
- ref_match: "&@96",
+ ref_match: "&@107",
+ },
+ CompletionItem {
+ label: "qux",
+ source_range: 109..110,
+ text_edit: TextEdit {
+ indels: [
+ Indel {
+ insert: "(",
+ delete: 107..107,
+ },
+ Indel {
+ insert: "qux)()",
+ delete: 109..110,
+ },
+ ],
+ },
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "fn()",
},
]
"#]],
@@ -1631,6 +2061,48 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
}
#[test]
+ fn expected_fn_type_ref() {
+ check_kinds(
+ r#"
+struct S { field: fn() }
+
+fn foo() {
+ let foo: fn() = S { fields: || {}}.fi$0;
+}
+"#,
+ &[CompletionItemKind::SymbolKind(SymbolKind::Field)],
+ expect![[r#"
+ [
+ CompletionItem {
+ label: "field",
+ source_range: 76..78,
+ delete: 76..78,
+ insert: "field",
+ kind: SymbolKind(
+ Field,
+ ),
+ detail: "fn()",
+ relevance: CompletionRelevance {
+ exact_name_match: false,
+ type_match: Some(
+ Exact,
+ ),
+ is_local: false,
+ is_item_from_trait: false,
+ is_name_already_imported: false,
+ requires_import: false,
+ is_op_method: false,
+ is_private_editable: false,
+ postfix_match: None,
+ is_definite: false,
+ },
+ },
+ ]
+ "#]],
+ )
+ }
+
+ #[test]
fn qualified_path_ref() {
check_kinds(
r#"
@@ -1689,8 +2161,8 @@ fn foo() {
lc foo [type+local]
ev Foo::A(…) [type_could_unify]
ev Foo::B [type_could_unify]
+ en Foo [type_could_unify]
fn foo() []
- en Foo []
fn bar() []
fn baz() []
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
index 8afce8db5..d23ed71fd 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
@@ -62,6 +62,7 @@ fn render(
),
_ => (name.unescaped().to_smol_str(), name.to_smol_str()),
};
+
let mut item = CompletionItem::new(
if func.self_param(db).is_some() {
CompletionItemKind::Method
@@ -77,8 +78,31 @@ fn render(
.as_assoc_item(ctx.db())
.and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
.map_or(false, |trait_| completion.is_ops_trait(trait_));
+
+ let (has_dot_receiver, has_call_parens, cap) = match func_kind {
+ FuncKind::Function(&PathCompletionCtx {
+ kind: PathKind::Expr { .. },
+ has_call_parens,
+ ..
+ }) => (false, has_call_parens, ctx.completion.config.snippet_cap),
+ FuncKind::Method(&DotAccess { kind: DotAccessKind::Method { has_parens }, .. }, _) => {
+ (true, has_parens, ctx.completion.config.snippet_cap)
+ }
+ FuncKind::Method(DotAccess { kind: DotAccessKind::Field { .. }, .. }, _) => {
+ (true, false, ctx.completion.config.snippet_cap)
+ }
+ _ => (false, false, None),
+ };
+ let complete_call_parens = cap
+ .filter(|_| !has_call_parens)
+ .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?)));
+
item.set_relevance(CompletionRelevance {
- type_match: compute_type_match(completion, &ret_type),
+ type_match: if has_call_parens || complete_call_parens.is_some() {
+ compute_type_match(completion, &ret_type)
+ } else {
+ compute_type_match(completion, &func.ty(db))
+ },
exact_name_match: compute_exact_name_match(completion, &call),
is_op_method,
..ctx.completion_relevance()
@@ -98,47 +122,19 @@ fn render(
_ => (),
}
+ let detail = if ctx.completion.config.full_function_signatures {
+ detail_full(db, func)
+ } else {
+ detail(db, func)
+ };
item.set_documentation(ctx.docs(func))
.set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func))
- .detail(detail(db, func))
+ .detail(detail)
.lookup_by(name.unescaped().to_smol_str());
- match ctx.completion.config.snippet_cap {
- Some(cap) => {
- let complete_params = match func_kind {
- FuncKind::Function(PathCompletionCtx {
- kind: PathKind::Expr { .. },
- has_call_parens: false,
- ..
- }) => Some(false),
- FuncKind::Method(
- DotAccess {
- kind:
- DotAccessKind::Method { has_parens: false } | DotAccessKind::Field { .. },
- ..
- },
- _,
- ) => Some(true),
- _ => None,
- };
- if let Some(has_dot_receiver) = complete_params {
- if let Some((self_param, params)) =
- params(ctx.completion, func, &func_kind, has_dot_receiver)
- {
- add_call_parens(
- &mut item,
- completion,
- cap,
- call,
- escaped_call,
- self_param,
- params,
- );
- }
- }
- }
- _ => (),
- };
+ if let Some((cap, (self_param, params))) = complete_call_parens {
+ add_call_parens(&mut item, completion, cap, call, escaped_call, self_param, params);
+ }
match ctx.import_to_add {
Some(import_to_add) => {
@@ -263,6 +259,21 @@ fn detail(db: &dyn HirDatabase, func: hir::Function) -> String {
detail
}
+fn detail_full(db: &dyn HirDatabase, func: hir::Function) -> String {
+ let signature = format!("{}", func.display(db));
+ let mut detail = String::with_capacity(signature.len());
+
+ for segment in signature.split_whitespace() {
+ if !detail.is_empty() {
+ detail.push(' ');
+ }
+
+ detail.push_str(segment);
+ }
+
+ detail
+}
+
fn params_display(db: &dyn HirDatabase, func: hir::Function) -> String {
if let Some(self_param) = func.self_param(db) {
let assoc_fn_params = func.assoc_fn_params(db);
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
index 343719c53..e667e2e01 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs
@@ -179,8 +179,9 @@ fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<V
item,
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)?;
- Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item, None)))
+ Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item)))
};
let mut res = Vec::with_capacity(requires.len());
for import in requires {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index 2464e8d5f..f28afacc5 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -64,9 +64,11 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: false,
+ full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
prefer_no_std: false,
+ prefer_prelude: true,
insert_use: InsertUseConfig {
granularity: ImportGranularity::Crate,
prefix_kind: PrefixKind::Plain,
@@ -148,16 +150,29 @@ fn render_completion_list(completions: Vec<CompletionItem>) -> String {
fn monospace_width(s: &str) -> usize {
s.chars().count()
}
- let label_width =
- completions.iter().map(|it| monospace_width(&it.label)).max().unwrap_or_default().min(22);
+ let label_width = completions
+ .iter()
+ .map(|it| {
+ monospace_width(&it.label)
+ + monospace_width(it.label_detail.as_deref().unwrap_or_default())
+ })
+ .max()
+ .unwrap_or_default()
+ .min(22);
completions
.into_iter()
.map(|it| {
let tag = it.kind.tag();
let var_name = format!("{tag} {}", it.label);
let mut buf = var_name;
+ if let Some(ref label_detail) = it.label_detail {
+ format_to!(buf, "{label_detail}");
+ }
if let Some(detail) = it.detail {
- let width = label_width.saturating_sub(monospace_width(&it.label));
+ let width = label_width.saturating_sub(
+ monospace_width(&it.label)
+ + monospace_width(&it.label_detail.unwrap_or_default()),
+ );
format_to!(buf, "{:width$} {}", "", detail, width = width);
}
if it.deprecated {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
index be5b7f8a3..b4f936b35 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/expression.rs
@@ -26,22 +26,22 @@ fn baz() {
"#,
// This should not contain `FooDesc {…}`.
expect![[r#"
- ct CONST
- en Enum
+ ct CONST Unit
+ en Enum Enum
fn baz() fn()
fn create_foo(…) fn(&FooDesc)
fn function() fn()
ma makro!(…) macro_rules! makro
md _69latrick
md module
- sc STATIC
- st FooDesc
- st Record
- st Tuple
- st Unit
- un Union
+ sc STATIC Unit
+ st FooDesc FooDesc
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ un Union Union
ev TupleV(…) TupleV(u32)
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -83,7 +83,7 @@ fn func(param0 @ (param1, param2): (i32, i32)) {
lc param0 (i32, i32)
lc param1 i32
lc param2 i32
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -117,24 +117,24 @@ impl Unit {
"#,
// `self` is in here twice, once as the module, once as the local
expect![[r#"
- ct CONST
+ ct CONST Unit
cp CONST_PARAM
- en Enum
+ en Enum Enum
fn function() fn()
fn local_func() fn()
lc self Unit
ma makro!(…) macro_rules! makro
md module
md qualified
- sp Self
- sc STATIC
- st Record
- st Tuple
- st Unit
+ sp Self Unit
+ sc STATIC Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tp TypeParam
- un Union
+ un Union Union
ev TupleV(…) TupleV(u32)
- bt u32
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -181,18 +181,18 @@ impl Unit {
}
"#,
expect![[r#"
- ct CONST
- en Enum
+ ct CONST Unit
+ en Enum Enum
fn function() fn()
ma makro!(…) macro_rules! makro
md module
md qualified
- sc STATIC
- st Record
- st Tuple
- st Unit
+ sc STATIC Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
+ un Union Union
ev TupleV(…) TupleV(u32)
?? Unresolved
"#]],
@@ -211,7 +211,7 @@ fn complete_in_block() {
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -256,7 +256,7 @@ fn complete_after_if_expr() {
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -304,7 +304,7 @@ fn complete_in_match_arm() {
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -328,7 +328,7 @@ fn completes_in_loop_ctx() {
r"fn my() { loop { $0 } }",
expect![[r#"
fn my() fn()
- bt u32
+ bt u32 u32
kw break
kw const
kw continue
@@ -370,7 +370,7 @@ fn completes_in_let_initializer() {
r#"fn main() { let _ = $0 }"#,
expect![[r#"
fn main() fn()
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -403,8 +403,8 @@ fn foo() {
"#,
expect![[r#"
fn foo() fn()
- st Foo
- bt u32
+ st Foo Foo
+ bt u32 u32
kw crate::
kw false
kw for
@@ -439,7 +439,7 @@ fn foo() {
expect![[r#"
fn foo() fn()
lc bar i32
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -470,7 +470,7 @@ fn quux(x: i32) {
fn quux(…) fn(i32)
lc x i32
ma m!(…) macro_rules! m
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -497,7 +497,7 @@ fn quux(x: i32) {
fn quux(…) fn(i32)
lc x i32
ma m!(…) macro_rules! m
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -683,11 +683,11 @@ fn brr() {
}
"#,
expect![[r#"
- en HH
+ en HH HH
fn brr() fn()
- st YoloVariant
+ st YoloVariant YoloVariant
st YoloVariant {…} YoloVariant { f: usize }
- bt u32
+ bt u32 u32
kw crate::
kw false
kw for
@@ -749,7 +749,7 @@ fn foo() { if foo {} $0 }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -789,7 +789,7 @@ fn foo() { if foo {} el$0 }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -829,7 +829,7 @@ fn foo() { bar(if foo {} $0) }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw crate::
kw else
kw else if
@@ -853,7 +853,7 @@ fn foo() { bar(if foo {} el$0) }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw crate::
kw else
kw else if
@@ -877,7 +877,7 @@ fn foo() { if foo {} $0 let x = 92; }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -917,7 +917,7 @@ fn foo() { if foo {} el$0 let x = 92; }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -957,7 +957,7 @@ fn foo() { if foo {} el$0 { let x = 92; } }
"#,
expect![[r#"
fn foo() fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw else
@@ -1009,7 +1009,7 @@ pub struct UnstableThisShouldNotBeListed;
expect![[r#"
fn main() fn()
md std
- bt u32
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1060,8 +1060,8 @@ pub struct UnstableButWeAreOnNightlyAnyway;
expect![[r#"
fn main() fn()
md std
- st UnstableButWeAreOnNightlyAnyway
- bt u32
+ st UnstableButWeAreOnNightlyAnyway UnstableButWeAreOnNightlyAnyway
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1094,3 +1094,157 @@ pub struct UnstableButWeAreOnNightlyAnyway;
"#]],
);
}
+
+#[test]
+fn inside_format_args_completions_work() {
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("{}", Foo.$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("{}", Foo.f$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+}
+
+#[test]
+fn inside_faulty_format_args_completions_work() {
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("", Foo.$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("", Foo.f$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("{} {named} {captured} {named} {}", a, named = c, Foo.f$0);
+}
+"#,
+ expect![[r#"
+ me foo() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ "#]],
+ );
+ check_empty(
+ r#"
+//- minicore: fmt
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+
+fn main() {
+ format_args!("{", Foo.f$0);
+}
+"#,
+ expect![[r#"
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn if if expr {}
+ sn match match expr {}
+ sn not !expr
+ sn ref &expr
+ sn refm &mut expr
+ sn unsafe unsafe {}
+ sn while while expr {}
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
index 4cdfd546f..c58374f2e 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
@@ -116,19 +116,47 @@ fn main() {
}
#[test]
-fn short_paths_are_ignored() {
- cov_mark::check!(flyimport_exact_on_short_path);
+fn short_paths_are_prefix_matched() {
+ cov_mark::check!(flyimport_prefix_on_short_path);
check(
r#"
//- /lib.rs crate:dep
-pub struct Bar;
+pub struct Barc;
pub struct Rcar;
pub struct Rc;
+pub const RC: () = ();
pub mod some_module {
pub struct Bar;
pub struct Rcar;
pub struct Rc;
+ pub const RC: () = ();
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ Rc$0
+}
+"#,
+ expect![[r#"
+ st Rc (use dep::Rc) Rc
+ st Rcar (use dep::Rcar) Rcar
+ st Rc (use dep::some_module::Rc) Rc
+ st Rcar (use dep::some_module::Rcar) Rcar
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub struct Barc;
+pub struct Rcar;
+pub struct Rc;
+pub const RC: () = ();
+pub mod some_module {
+ pub struct Bar;
+ pub struct Rcar;
+ pub struct Rc;
+ pub const RC: () = ();
}
//- /main.rs crate:main deps:dep
@@ -137,8 +165,36 @@ fn main() {
}
"#,
expect![[r#"
- st Rc (use dep::Rc)
- st Rc (use dep::some_module::Rc)
+ ct RC (use dep::RC) ()
+ st Rc (use dep::Rc) Rc
+ st Rcar (use dep::Rcar) Rcar
+ ct RC (use dep::some_module::RC) ()
+ st Rc (use dep::some_module::Rc) Rc
+ st Rcar (use dep::some_module::Rcar) Rcar
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:dep
+pub struct Barc;
+pub struct Rcar;
+pub struct Rc;
+pub const RC: () = ();
+pub mod some_module {
+ pub struct Bar;
+ pub struct Rcar;
+ pub struct Rc;
+ pub const RC: () = ();
+}
+
+//- /main.rs crate:main deps:dep
+fn main() {
+ RC$0
+}
+"#,
+ expect![[r#"
+ ct RC (use dep::RC) ()
+ ct RC (use dep::some_module::RC) ()
"#]],
);
}
@@ -171,10 +227,10 @@ fn main() {
}
"#,
expect![[r#"
- st ThirdStruct (use dep::some_module::ThirdStruct)
- st AfterThirdStruct (use dep::some_module::AfterThirdStruct)
- st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct)
- "#]],
+ st ThirdStruct (use dep::some_module::ThirdStruct) ThirdStruct
+ st AfterThirdStruct (use dep::some_module::AfterThirdStruct) AfterThirdStruct
+ st ThiiiiiirdStruct (use dep::some_module::ThiiiiiirdStruct) ThiiiiiirdStruct
+ "#]],
);
}
@@ -253,7 +309,7 @@ fn trait_const_fuzzy_completion() {
check(
fixture,
expect![[r#"
- ct SPECIAL_CONST (use dep::test_mod::TestTrait)
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8
"#]],
);
@@ -541,8 +597,8 @@ fn main() {
}
"#,
expect![[r#"
- ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED
fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+ ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
"#]],
);
}
@@ -661,7 +717,7 @@ fn main() {
check(
fixture,
expect![[r#"
- st Item (use foo::bar::baz::Item)
+ st Item (use foo::bar) Item
"#]],
);
@@ -669,19 +725,19 @@ fn main() {
"Item",
fixture,
r#"
- use foo::bar;
+use foo::bar;
- mod foo {
- pub mod bar {
- pub mod baz {
- pub struct Item;
- }
- }
+mod foo {
+ pub mod bar {
+ pub mod baz {
+ pub struct Item;
}
+ }
+}
- fn main() {
- bar::baz::Item
- }"#,
+fn main() {
+ bar::baz::Item
+}"#,
);
}
@@ -703,7 +759,7 @@ fn main() {
check(
fixture,
expect![[r#"
- ct TEST_ASSOC (use foo::Item)
+ ct TEST_ASSOC (use foo::Item) usize
"#]],
);
@@ -747,8 +803,8 @@ fn main() {
check(
fixture,
expect![[r#"
- ct TEST_ASSOC (use foo::bar::Item)
- "#]],
+ ct TEST_ASSOC (use foo::bar) usize
+ "#]],
);
check_edit(
@@ -841,8 +897,8 @@ fn main() {
TES$0
}"#,
expect![[r#"
- ct TEST_CONST (use foo::TEST_CONST)
- "#]],
+ ct TEST_CONST (use foo::TEST_CONST) usize
+ "#]],
);
check(
@@ -858,9 +914,9 @@ fn main() {
tes$0
}"#,
expect![[r#"
- ct TEST_CONST (use foo::TEST_CONST)
- fn test_function() (use foo::test_function) fn() -> i32
- "#]],
+ ct TEST_CONST (use foo::TEST_CONST) usize
+ fn test_function() (use foo::test_function) fn() -> i32
+ "#]],
);
check(
@@ -873,9 +929,9 @@ mod foo {
}
fn main() {
- Te$0
+ Tes$0
}"#,
- expect![[]],
+ expect![""],
);
}
@@ -1082,8 +1138,8 @@ mod mud {
}
"#,
expect![[r#"
- st Struct (use crate::Struct)
- "#]],
+ st Struct (use crate::Struct) Struct
+ "#]],
);
}
@@ -1194,7 +1250,7 @@ enum Foo {
}
}"#,
expect![[r#"
- st Barbara (use foo::Barbara)
+ st Barbara (use foo::Barbara) Barbara
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
index 3ef2a7c94..de3fd0518 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item.rs
@@ -18,15 +18,15 @@ fn target_type_or_trait_in_impl_block() {
impl Tra$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -40,15 +40,15 @@ fn target_type_in_trait_impl_block() {
impl Trait for Str$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
index 8af6cce98..67cf551fc 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/pattern.rs
@@ -355,6 +355,35 @@ fn outer(Foo { bar$0 }: Foo) {}
}
#[test]
+fn completes_in_record_field_pat_with_generic_type_alias() {
+ check_empty(
+ r#"
+type Wrap<T> = T;
+
+enum X {
+ A { cool: u32, stuff: u32 },
+ B,
+}
+
+fn main() {
+ let wrapped = Wrap::<X>::A {
+ cool: 100,
+ stuff: 100,
+ };
+
+ if let Wrap::<X>::A { $0 } = &wrapped {};
+}
+"#,
+ expect![[r#"
+ fd cool u32
+ fd stuff u32
+ kw mut
+ kw ref
+ "#]],
+ )
+}
+
+#[test]
fn completes_in_fn_param() {
check_empty(
r#"
@@ -406,7 +435,7 @@ fn foo() {
}
"#,
expect![[r#"
- st Bar
+ st Bar Bar
kw crate::
kw self::
"#]],
@@ -421,7 +450,7 @@ fn foo() {
}
"#,
expect![[r#"
- st Foo
+ st Foo Foo
kw crate::
kw self::
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
index 789ad6634..46a3e97d3 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs
@@ -16,16 +16,16 @@ fn predicate_start() {
struct Foo<'lt, T, const C: usize> where $0 {}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -89,16 +89,16 @@ fn param_list_for_for_pred() {
struct Foo<'lt, T, const C: usize> where for<'a> $0 {}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -114,16 +114,16 @@ impl Record {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self
- st Record
- st Tuple
- st Unit
+ sp Self Record
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
index 65cefdb08..18afde1b7 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/record.rs
@@ -186,10 +186,10 @@ fn main() {
lc foo Foo
lc thing i32
md core
- st Foo
+ st Foo Foo
st Foo {…} Foo { foo1: u32, foo2: u32 }
tt Default
- bt u32
+ bt u32 u32
kw crate::
kw self::
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
index e80a28904..f96fb71f2 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -2,10 +2,15 @@
use expect_test::{expect, Expect};
-use crate::tests::{
- check_edit, completion_list, completion_list_no_kw, completion_list_with_trigger_character,
+use crate::{
+ tests::{
+ check_edit, completion_list, completion_list_no_kw, completion_list_with_trigger_character,
+ },
+ CompletionItemKind,
};
+use super::{do_completion_with_config, TEST_CONFIG};
+
fn check_no_kw(ra_fixture: &str, expect: Expect) {
let actual = completion_list_no_kw(ra_fixture);
expect.assert_eq(&actual)
@@ -79,10 +84,10 @@ pub mod prelude {
}
"#,
expect![[r#"
- md std
- st Option
- bt u32
- "#]],
+ md std
+ st Option Option
+ bt u32 u32
+ "#]],
);
}
@@ -107,11 +112,11 @@ mod macros {
}
"#,
expect![[r#"
- fn f() fn()
- ma concat!(…) macro_rules! concat
- md std
- bt u32
- "#]],
+ fn f() fn()
+ ma concat!(…) macro_rules! concat
+ md std
+ bt u32 u32
+ "#]],
);
}
@@ -137,11 +142,11 @@ pub mod prelude {
}
"#,
expect![[r#"
- md core
- md std
- st String
- bt u32
- "#]],
+ md core
+ md std
+ st String String
+ bt u32 u32
+ "#]],
);
}
@@ -166,10 +171,10 @@ pub mod prelude {
}
"#,
expect![[r#"
- fn f() fn()
- md std
- bt u32
- "#]],
+ fn f() fn()
+ md std
+ bt u32 u32
+ "#]],
);
}
@@ -441,10 +446,10 @@ mod p {
}
"#,
expect![[r#"
- ct RIGHT_CONST
- fn right_fn() fn()
- st RightType
- "#]],
+ ct RIGHT_CONST u32
+ fn right_fn() fn()
+ st RightType WrongType
+ "#]],
);
check_edit(
@@ -876,7 +881,7 @@ fn main() {
fn main() fn()
lc foobar i32
ma x!(…) macro_rules! x
- bt u32
+ bt u32 u32
"#]],
)
}
@@ -1003,8 +1008,8 @@ fn here_we_go() {
"#,
expect![[r#"
fn here_we_go() fn()
- st Foo (alias Bar)
- bt u32
+ st Foo (alias Bar) Foo
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1052,8 +1057,8 @@ fn here_we_go() {
"#,
expect![[r#"
fn here_we_go() fn()
- st Foo (alias Bar, Qux, Baz)
- bt u32
+ st Foo (alias Bar, Qux, Baz) Foo
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1173,7 +1178,7 @@ fn bar() { qu$0 }
expect![[r#"
fn bar() fn()
fn foo() (alias qux) fn()
- bt u32
+ bt u32 u32
kw const
kw crate::
kw enum
@@ -1222,7 +1227,7 @@ fn here_we_go() {
}
"#,
expect![[r#"
- st Bar (alias Qux)
+ st Bar (alias Qux) Bar
"#]],
);
}
@@ -1241,7 +1246,7 @@ fn here_we_go() {
}
"#,
expect![[r#"
- st Bar (alias Qux)
+ st Bar (alias Qux) Bar
"#]],
);
}
@@ -1262,8 +1267,8 @@ fn here_we_go() {
expect![[r#"
fn here_we_go() fn()
md foo
- st Bar (alias Qux) (use foo::Bar)
- bt u32
+ st Bar (alias Qux) (use foo::Bar) Bar
+ bt u32 u32
kw crate::
kw false
kw for
@@ -1282,6 +1287,30 @@ fn here_we_go() {
}
#[test]
+fn completes_only_public() {
+ check(
+ r#"
+//- /e.rs
+pub(self) fn i_should_be_hidden() {}
+pub(in crate::e) fn i_should_also_be_hidden() {}
+pub fn i_am_public () {}
+
+//- /lib.rs crate:krate
+pub mod e;
+
+//- /main.rs deps:krate crate:main
+use krate::e;
+fn main() {
+ e::$0
+}"#,
+ expect![
+ "fn i_am_public() fn()
+"
+ ],
+ )
+}
+
+#[test]
fn completion_filtering_excludes_non_identifier_doc_aliases() {
check_edit(
"PartialOrdcmporder",
@@ -1303,3 +1332,176 @@ struct Foo<T: PartialOrd
"#,
);
}
+
+fn check_signatures(src: &str, kind: CompletionItemKind, reduced: Expect, full: Expect) {
+ const FULL_SIGNATURES_CONFIG: crate::CompletionConfig = {
+ let mut x = TEST_CONFIG;
+ x.full_function_signatures = true;
+ x
+ };
+
+ // reduced signature
+ let completion = do_completion_with_config(TEST_CONFIG, src, kind);
+ assert!(completion[0].detail.is_some());
+ reduced.assert_eq(completion[0].detail.as_ref().unwrap());
+
+ // full signature
+ let completion = do_completion_with_config(FULL_SIGNATURES_CONFIG, src, kind);
+ assert!(completion[0].detail.is_some());
+ full.assert_eq(completion[0].detail.as_ref().unwrap());
+}
+
+#[test]
+fn respects_full_function_signatures() {
+ check_signatures(
+ r#"
+pub fn foo<'x, T>(x: &'x mut T) -> u8 where T: Clone, { 0u8 }
+fn main() { fo$0 }
+"#,
+ CompletionItemKind::SymbolKind(ide_db::SymbolKind::Function),
+ expect!("fn(&mut T) -> u8"),
+ expect!("pub fn foo<'x, T>(x: &'x mut T) -> u8 where T: Clone,"),
+ );
+
+ check_signatures(
+ r#"
+struct Foo;
+struct Bar;
+impl Bar {
+ pub const fn baz(x: Foo) -> ! { loop {} };
+}
+
+fn main() { Bar::b$0 }
+"#,
+ CompletionItemKind::SymbolKind(ide_db::SymbolKind::Function),
+ expect!("const fn(Foo) -> !"),
+ expect!("pub const fn baz(x: Foo) -> !"),
+ );
+
+ check_signatures(
+ r#"
+struct Foo;
+struct Bar;
+impl Bar {
+ pub const fn baz<'foo>(&'foo mut self, x: &'foo Foo) -> ! { loop {} };
+}
+
+fn main() {
+ let mut bar = Bar;
+ bar.b$0
+}
+"#,
+ CompletionItemKind::Method,
+ expect!("const fn(&'foo mut self, &Foo) -> !"),
+ expect!("pub const fn baz<'foo>(&'foo mut self, x: &'foo Foo) -> !"),
+ );
+}
+
+#[test]
+fn skips_underscore() {
+ check_with_trigger_character(
+ r#"
+fn foo(_$0) { }
+"#,
+ Some('_'),
+ expect![[r#""#]],
+ );
+ check_with_trigger_character(
+ r#"
+fn foo(_: _$0) { }
+"#,
+ Some('_'),
+ expect![[r#""#]],
+ );
+ check_with_trigger_character(
+ r#"
+fn foo<T>() {
+ foo::<_$0>();
+}
+"#,
+ Some('_'),
+ expect![[r#""#]],
+ );
+ // underscore expressions are fine, they are invalid so the user definitely meant to type an
+ // underscored name here
+ check_with_trigger_character(
+ r#"
+fn foo() {
+ _$0
+}
+"#,
+ Some('_'),
+ expect![[r#"
+ fn foo() fn()
+ bt u32 u32
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw let
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ );
+}
+
+#[test]
+fn no_skip_underscore_ident() {
+ check_with_trigger_character(
+ r#"
+fn foo(a_$0) { }
+"#,
+ Some('_'),
+ expect![[r#"
+ kw mut
+ kw ref
+ "#]],
+ );
+ check_with_trigger_character(
+ r#"
+fn foo(_: a_$0) { }
+"#,
+ Some('_'),
+ expect![[r#"
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+ check_with_trigger_character(
+ r#"
+fn foo<T>() {
+ foo::<a_$0>();
+}
+"#,
+ Some('_'),
+ expect![[r#"
+ tp T
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
index d518dd764..c7161f82c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs
@@ -17,18 +17,18 @@ struct Foo<'lt, T, const C: usize> {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ sp Self Foo<'_, {unknown}, _>
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -42,18 +42,18 @@ fn tuple_struct_field() {
struct Foo<'lt, T, const C: usize>(f$0);
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ sp Self Foo<'_, {unknown}, _>
+ st Foo<…> Foo<'_, {unknown}, _>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw pub
kw pub(crate)
@@ -70,16 +70,16 @@ fn fn_return_type() {
fn x<'lt, T, const C: usize>() -> $0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -100,19 +100,19 @@ fn foo() -> B$0 {
}
"#,
expect![[r#"
- en Enum
- ma makro!(…) macro_rules! makro
- md module
- st Record
- st Tuple
- st Unit
- tt Trait
- un Union
- bt u32
- it ()
- kw crate::
- kw self::
- "#]],
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Trait
+ un Union Union
+ bt u32 u32
+ it ()
+ kw crate::
+ kw self::
+ "#]],
)
}
@@ -124,16 +124,16 @@ struct Foo<T>(T);
const FOO: $0 = Foo(2);
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ st Foo<…> Foo<{unknown}>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it Foo<i32>
kw crate::
kw self::
@@ -151,15 +151,15 @@ fn f2() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it i32
kw crate::
kw self::
@@ -179,15 +179,15 @@ fn f2() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it u64
kw crate::
kw self::
@@ -204,15 +204,15 @@ fn f2(x: u64) -> $0 {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it u64
kw crate::
kw self::
@@ -230,15 +230,15 @@ fn f2(x: $0) {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it i32
kw crate::
kw self::
@@ -262,17 +262,17 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md a
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it a::Foo<a::Foo<i32>>
kw crate::
kw self::
@@ -291,17 +291,17 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Foo<…>
- st Record
- st Tuple
- st Unit
+ st Foo<…> Foo<{unknown}>
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
it Foo<i32>
kw crate::
kw self::
@@ -319,16 +319,16 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -341,14 +341,14 @@ fn foo<'lt, T, const C: usize>() {
}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
- un Union
+ un Union Union
"#]],
);
}
@@ -384,18 +384,18 @@ trait Trait2<T>: Trait1 {
fn foo<'lt, T: Trait2<$0>, const CONST_PARAM: usize>(_: T) {}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tt Trait1
tt Trait2
tp T
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -409,15 +409,15 @@ trait Trait2<T> {
fn foo<'lt, T: Trait2<self::$0>, const CONST_PARAM: usize>(_: T) {}
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt Trait
tt Trait2
- un Union
+ un Union Union
"#]],
);
}
@@ -434,18 +434,18 @@ trait Tr<T> {
impl Tr<$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- sp Self
- st Record
- st S
- st Tuple
- st Unit
+ sp Self dyn Tr<{unknown}>
+ st Record Record
+ st S S
+ st Tuple Tuple
+ st Unit Unit
tt Tr
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -481,16 +481,16 @@ trait MyTrait<T, U> {
fn f(t: impl MyTrait<u$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -506,16 +506,16 @@ trait MyTrait<T, U> {
fn f(t: impl MyTrait<u8, u$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -549,16 +549,16 @@ trait MyTrait<T, U = u8> {
fn f(t: impl MyTrait<u$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -574,18 +574,18 @@ trait MyTrait<T, U = u8> {
fn f(t: impl MyTrait<u8, u$0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
ta Item1 = (as MyTrait) type Item1
ta Item2 = (as MyTrait) type Item2
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -619,16 +619,16 @@ trait MyTrait {
fn f(t: impl MyTrait<Item1 = $0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -644,16 +644,16 @@ trait MyTrait {
fn f(t: impl MyTrait<Item1 = u8, Item2 = $0
"#,
expect![[r#"
- en Enum
+ en Enum Enum
ma makro!(…) macro_rules! makro
md module
- st Record
- st Tuple
- st Unit
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
tt MyTrait
tt Trait
- un Union
- bt u32
+ un Union Union
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -668,7 +668,7 @@ trait MyTrait {
fn f(t: impl MyTrait<C = $0
"#,
expect![[r#"
- ct CONST
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -691,9 +691,9 @@ pub struct S;
"#,
expect![[r#"
md std
- sp Self
- st Foo
- bt u32
+ sp Self Foo
+ st Foo Foo
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -716,10 +716,10 @@ pub struct S;
"#,
expect![[r#"
md std
- sp Self
- st Foo
- st S
- bt u32
+ sp Self Foo
+ st Foo Foo
+ st S S
+ bt u32 u32
kw crate::
kw self::
"#]],
@@ -739,19 +739,19 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- en Enum
- ma makro!(…) macro_rules! makro
- md module
- st Foo
- st Record
- st Tuple
- st Unit
- tt Trait
- un Union
- bt u32
- kw crate::
- kw self::
- "#]],
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Trait
+ un Union Union
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
);
// FIXME: This should probably also suggest completions for types, at least those that have
// associated constants usable in this position. For example, a user could be typing
@@ -766,12 +766,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Method generic params
@@ -785,19 +785,19 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- en Enum
- ma makro!(…) macro_rules! makro
- md module
- st Foo
- st Record
- st Tuple
- st Unit
- tt Trait
- un Union
- bt u32
- kw crate::
- kw self::
- "#]],
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Trait
+ un Union Union
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
);
check(
r#"
@@ -809,12 +809,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Associated type generic params
@@ -828,20 +828,20 @@ fn completes_const_and_type_generics_separately() {
fn foo(_: impl Bar<Baz<F$0, 0> = ()>) {}
"#,
expect![[r#"
- en Enum
- ma makro!(…) macro_rules! makro
- md module
- st Foo
- st Record
- st Tuple
- st Unit
- tt Bar
- tt Trait
- un Union
- bt u32
- kw crate::
- kw self::
- "#]],
+ en Enum Enum
+ ma makro!(…) macro_rules! makro
+ md module
+ st Foo Foo
+ st Record Record
+ st Tuple Tuple
+ st Unit Unit
+ tt Bar
+ tt Trait
+ un Union Union
+ bt u32 u32
+ kw crate::
+ kw self::
+ "#]],
);
check(
r#"
@@ -853,12 +853,12 @@ fn completes_const_and_type_generics_separately() {
fn foo<T: Bar<Baz<(), $0> = ()>>() {}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Type generic params
@@ -871,12 +871,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Type alias generic params
@@ -890,12 +890,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Enum variant params
@@ -908,12 +908,12 @@ fn completes_const_and_type_generics_separately() {
}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Trait params
@@ -924,12 +924,12 @@ fn completes_const_and_type_generics_separately() {
impl Foo<(), $0> for () {}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Trait alias params
@@ -942,12 +942,12 @@ fn completes_const_and_type_generics_separately() {
fn foo<T: Bar<X$0, ()>>() {}
"#,
expect![[r#"
- ct CONST
- ct X
- ma makro!(…) macro_rules! makro
- kw crate::
- kw self::
- "#]],
+ ct CONST Unit
+ ct X usize
+ ma makro!(…) macro_rules! makro
+ kw crate::
+ kw self::
+ "#]],
);
// Omitted lifetime params
@@ -957,7 +957,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<F$0, _>; }
"#,
expect![[r#"
- ct CONST
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
@@ -970,7 +970,7 @@ struct S<'a, 'b, const C: usize, T>(core::marker::PhantomData<&'a &'b T>);
fn foo<'a>() { S::<'static, 'static, F$0, _>; }
"#,
expect![[r#"
- ct CONST
+ ct CONST Unit
ma makro!(…) macro_rules! makro
kw crate::
kw self::
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
index 4c74dba52..167bdec54 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/use_tree.rs
@@ -65,7 +65,7 @@ use self::{foo::*, bar$0};
"#,
expect![[r#"
md foo
- st S
+ st S S
"#]],
);
}
@@ -82,7 +82,7 @@ mod foo {
use foo::{bar::$0}
"#,
expect![[r#"
- st FooBar
+ st FooBar FooBar
"#]],
);
check(
@@ -115,7 +115,7 @@ mod foo {
use foo::{bar::{baz::$0}}
"#,
expect![[r#"
- st FooBarBaz
+ st FooBarBaz FooBarBaz
"#]],
);
check(
@@ -152,7 +152,7 @@ struct Bar;
"#,
expect![[r#"
ma foo macro_rules! foo_
- st Foo
+ st Foo Foo
"#]],
);
}
@@ -193,7 +193,7 @@ struct Bar;
"#,
expect![[r#"
md foo
- st Bar
+ st Bar Bar
"#]],
);
}
@@ -212,7 +212,7 @@ struct Bar;
expect![[r#"
md bar
md foo
- st Bar
+ st Bar Bar
"#]],
);
}
@@ -230,7 +230,7 @@ mod a {
}
"#,
expect![[r#"
- ct A
+ ct A usize
md b
kw super::
"#]],
@@ -248,7 +248,7 @@ struct Bar;
"#,
expect![[r#"
md foo
- st Bar
+ st Bar Bar
"#]],
);
}
@@ -265,7 +265,7 @@ pub mod foo {}
"#,
expect![[r#"
md foo
- st Foo
+ st Foo Foo
"#]],
);
}
@@ -425,7 +425,7 @@ marco_rules! m { () => {} }
expect![[r#"
fn foo fn()
md simd
- st S
+ st S S
"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index faec74206..4a2e770f1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -13,16 +13,16 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-tracing = "0.1.35"
-rayon = "1.6.1"
+tracing.workspace = true
+rayon.workspace = true
fst = { version = "0.4.7", default-features = false }
rustc-hash = "1.1.0"
once_cell = "1.17.0"
-either = "1.7.0"
-itertools = "0.10.5"
+either.workspace = true
+itertools.workspace = true
arrayvec = "0.7.2"
-indexmap = "2.0.0"
-memchr = "2.5.0"
+indexmap.workspace = true
+memchr = "2.6.4"
triomphe.workspace = true
nohash-hasher.workspace = true
@@ -43,7 +43,7 @@ line-index.workspace = true
[dev-dependencies]
expect-test = "1.4.0"
oorandom = "11.1.3"
-xshell = "0.2.2"
+xshell.workspace = true
# local deps
test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index a0b05c87a..343be870c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -97,13 +97,13 @@ impl RootDatabase {
// ExpandDatabase
hir::db::AstIdMapQuery
- hir::db::ParseMacroExpansionQuery
- hir::db::InternMacroCallQuery
- hir::db::MacroArgNodeQuery
hir::db::DeclMacroExpanderQuery
- hir::db::MacroExpandQuery
hir::db::ExpandProcMacroQuery
- hir::db::HygieneFrameQuery
+ hir::db::InternMacroCallQuery
+ hir::db::InternSyntaxContextQuery
+ hir::db::MacroArgQuery
+ hir::db::ParseMacroExpansionQuery
+ hir::db::RealSpanMapQuery
// DefDatabase
hir::db::FileItemTreeQuery
@@ -143,6 +143,13 @@ impl RootDatabase {
hir::db::FunctionVisibilityQuery
hir::db::ConstVisibilityQuery
hir::db::CrateSupportsNoStdQuery
+ hir::db::BlockItemTreeQueryQuery
+ hir::db::ExternCrateDeclDataQuery
+ hir::db::LangAttrQuery
+ hir::db::InternAnonymousConstQuery
+ hir::db::InternExternCrateQuery
+ hir::db::InternInTypeConstQuery
+ hir::db::InternUseQuery
// HirDatabase
hir::db::InferQueryQuery
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index 4ce80532e..ded5d4e3d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -161,8 +161,8 @@ impl IdentClass {
ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator),
ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator),
ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator),
- ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema,&prefix_expr).map(IdentClass::Operator),
- ast::TryExpr(try_expr) => OperatorClass::classify_try(sema,&try_expr).map(IdentClass::Operator),
+ ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema, &prefix_expr).map(IdentClass::Operator),
+ ast::TryExpr(try_expr) => OperatorClass::classify_try(sema, &try_expr).map(IdentClass::Operator),
_ => None,
}
}
@@ -492,7 +492,7 @@ impl NameRefClass {
match_ast! {
match parent {
ast::MethodCallExpr(method_call) => {
- sema.resolve_method_call_field_fallback(&method_call)
+ sema.resolve_method_call_fallback(&method_call)
.map(|it| {
it.map_left(Definition::Function)
.map_right(Definition::Field)
@@ -500,9 +500,12 @@ impl NameRefClass {
})
},
ast::FieldExpr(field_expr) => {
- sema.resolve_field(&field_expr)
- .map(Definition::Field)
- .map(NameRefClass::Definition)
+ sema.resolve_field_fallback(&field_expr)
+ .map(|it| {
+ it.map_left(Definition::Field)
+ .map_right(Definition::Function)
+ .either(NameRefClass::Definition, NameRefClass::Definition)
+ })
},
ast::RecordPatField(record_pat_field) => {
sema.resolve_record_pat_field(&record_pat_field)
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
index b63dde2c2..722517a76 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
@@ -54,6 +54,10 @@ impl FamousDefs<'_, '_> {
self.find_trait("core:convert:Into")
}
+ pub fn core_convert_Index(&self) -> Option<Trait> {
+ self.find_trait("core:ops:Index")
+ }
+
pub fn core_option_Option(&self) -> Option<Enum> {
self.find_enum("core:option:Option")
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
index 57563a174..1cb6ff862 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
@@ -9,12 +9,18 @@ pub struct LintGroup {
pub lint: Lint,
pub children: &'static [&'static str],
}
+
pub const DEFAULT_LINTS: &[Lint] = &[
Lint {
label: "absolute_paths_not_starting_with_crate",
description: r##"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"##,
},
Lint { label: "ambiguous_associated_items", description: r##"ambiguous associated items"## },
+ Lint {
+ label: "ambiguous_glob_imports",
+ description: r##"detects certain glob imports that require reporting an ambiguity error"##,
+ },
+ Lint { label: "ambiguous_glob_reexports", description: r##"ambiguous glob re-exports"## },
Lint { label: "anonymous_parameters", description: r##"detects anonymous parameters"## },
Lint { label: "arithmetic_overflow", description: r##"arithmetic operation overflows"## },
Lint {
@@ -25,6 +31,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "asm_sub_register",
description: r##"using only a subset of a register for inline asm inputs"##,
},
+ Lint {
+ label: "async_fn_in_trait",
+ description: r##"use of `async fn` in definition of a publicly-reachable trait"##,
+ },
Lint { label: "bad_asm_style", description: r##"incorrect use of inline assembly"## },
Lint {
label: "bare_trait_objects",
@@ -40,6 +50,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"`break` expression with label and unlabeled loop as value expression"##,
},
Lint {
+ label: "byte_slice_in_packed_struct_with_derive",
+ description: r##"`[u8]` or `str` used in a packed struct with `derive`"##,
+ },
+ Lint {
label: "cenum_impl_drop_cast",
description: r##"a C-like enum implementing Drop is cast"##,
},
@@ -52,6 +66,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"distinct impls distinguished only by the leak-check code"##,
},
Lint {
+ label: "coinductive_overlap_in_coherence",
+ description: r##"impls that are not considered to overlap may be considered to overlap in the future"##,
+ },
+ Lint {
label: "conflicting_repr_hints",
description: r##"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice"##,
},
@@ -60,10 +78,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects visually confusable pairs between identifiers"##,
},
Lint {
- label: "const_err",
- description: r##"constant evaluation encountered erroneous expression"##,
- },
- Lint {
label: "const_evaluatable_unchecked",
description: r##"detects a generic constant is used in a type without a emitting a warning"##,
},
@@ -71,13 +85,25 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "const_item_mutation",
description: r##"detects attempts to mutate a `const` item"##,
},
+ Lint {
+ label: "const_patterns_without_partial_eq",
+ description: r##"constant in pattern does not implement `PartialEq`"##,
+ },
Lint { label: "dead_code", description: r##"detect unused, unexported items"## },
Lint { label: "deprecated", description: r##"detects use of deprecated items"## },
Lint {
+ label: "deprecated_cfg_attr_crate_type_name",
+ description: r##"detects usage of `#![cfg_attr(..., crate_type/crate_name = "...")]`"##,
+ },
+ Lint {
label: "deprecated_in_future",
description: r##"detects use of items that will be deprecated in a future version"##,
},
Lint {
+ label: "deprecated_where_clause_location",
+ description: r##"deprecated where clause location"##,
+ },
+ Lint {
label: "deref_into_dyn_supertrait",
description: r##"`Deref` implementation usage with a supertrait trait object for output might be shadowed in the future"##,
},
@@ -90,10 +116,23 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"bounds of the form `T: Drop` are most likely incorrect"##,
},
Lint {
+ label: "dropping_copy_types",
+ description: r##"calls to `std::mem::drop` with a value that implements Copy"##,
+ },
+ Lint {
+ label: "dropping_references",
+ description: r##"calls to `std::mem::drop` with a reference instead of an owned value"##,
+ },
+ Lint { label: "duplicate_macro_attributes", description: r##"duplicated attribute"## },
+ Lint {
label: "dyn_drop",
description: r##"trait objects of the form `dyn Drop` are useless"##,
},
Lint {
+ label: "elided_lifetimes_in_associated_constant",
+ description: r##"elided lifetimes cannot be used in associated constants in impls"##,
+ },
+ Lint {
label: "elided_lifetimes_in_paths",
description: r##"hidden lifetime parameters in types are deprecated"##,
},
@@ -113,14 +152,38 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "exported_private_dependencies",
description: r##"public interface leaks type from a private dependency"##,
},
+ Lint {
+ label: "ffi_unwind_calls",
+ description: r##"call to foreign functions or function pointers with FFI-unwind ABI"##,
+ },
+ Lint {
+ label: "for_loops_over_fallibles",
+ description: r##"for-looping over an `Option` or a `Result`, which is more clearly expressed as an `if let`"##,
+ },
Lint { label: "forbidden_lint_groups", description: r##"applying forbid to lint-groups"## },
Lint {
+ label: "forgetting_copy_types",
+ description: r##"calls to `std::mem::forget` with a value that implements Copy"##,
+ },
+ Lint {
+ label: "forgetting_references",
+ description: r##"calls to `std::mem::forget` with a reference instead of an owned value"##,
+ },
+ Lint {
label: "function_item_references",
description: r##"suggest casting to a function pointer when attempting to take references to function items"##,
},
Lint {
label: "future_incompatible",
- description: r##"lint group for: forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, invalid-doc-attributes, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, unsupported-calling-conventions, deref-into-dyn-supertrait"##,
+ description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, implied-bounds-entailment, indirect-structural-match, invalid-alignment, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety"##,
+ },
+ Lint {
+ label: "fuzzy_provenance_casts",
+ description: r##"a fuzzy integer to pointer cast is used"##,
+ },
+ Lint {
+ label: "hidden_glob_reexports",
+ description: r##"name introduced by a private item shadows a name introduced by a public glob re-export"##,
},
Lint {
label: "ill_formed_attribute_input",
@@ -131,6 +194,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"floating-point literals cannot be used in patterns"##,
},
Lint {
+ label: "implied_bounds_entailment",
+ description: r##"impl method assumes more implied bounds than its corresponding trait method"##,
+ },
+ Lint {
label: "improper_ctypes",
description: r##"proper use of libc types in foreign modules"##,
},
@@ -156,6 +223,14 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects incompatible use of `#[inline(always)]` and `#[no_sanitize(...)]`"##,
},
Lint {
+ label: "internal_features",
+ description: r##"internal features are not supposed to be used"##,
+ },
+ Lint {
+ label: "invalid_alignment",
+ description: r##"raw pointers must be aligned before dereferencing"##,
+ },
+ Lint {
label: "invalid_atomic_ordering",
description: r##"usage of invalid atomic ordering in atomic operations and memory fences"##,
},
@@ -164,6 +239,26 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects invalid `#[doc(...)]` attributes"##,
},
Lint {
+ label: "invalid_from_utf8",
+ description: r##"using a non UTF-8 literal in `std::str::from_utf8`"##,
+ },
+ Lint {
+ label: "invalid_from_utf8_unchecked",
+ description: r##"using a non UTF-8 literal in `std::str::from_utf8_unchecked`"##,
+ },
+ Lint {
+ label: "invalid_macro_export_arguments",
+ description: r##""invalid_parameter" isn't a valid argument for `#[macro_export]`"##,
+ },
+ Lint {
+ label: "invalid_nan_comparisons",
+ description: r##"detects invalid floating point NaN comparisons"##,
+ },
+ Lint {
+ label: "invalid_reference_casting",
+ description: r##"casts of `&T` to `&mut T` without interior mutability"##,
+ },
+ Lint {
label: "invalid_type_param_default",
description: r##"type parameter default erroneously allowed in invalid location"##,
},
@@ -189,6 +284,26 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects derive helper attributes that are used before they are introduced"##,
},
Lint {
+ label: "let_underscore",
+ description: r##"lint group for: let-underscore-drop, let-underscore-lock"##,
+ },
+ Lint {
+ label: "let_underscore_drop",
+ description: r##"non-binding let on a type that implements `Drop`"##,
+ },
+ Lint {
+ label: "let_underscore_lock",
+ description: r##"non-binding let on a synchronization lock"##,
+ },
+ Lint {
+ label: "long_running_const_eval",
+ description: r##"detects long const eval operations"##,
+ },
+ Lint {
+ label: "lossy_provenance_casts",
+ description: r##"a lossy pointer to integer cast is used"##,
+ },
+ Lint {
label: "macro_expanded_macro_exports_accessed_by_absolute_paths",
description: r##"macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths"##,
},
@@ -197,6 +312,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"the `#[macro_use]` attribute is now deprecated in favor of using macros via the module system"##,
},
Lint {
+ label: "map_unit_fn",
+ description: r##"`Iterator::map` call that discard the iterator's values"##,
+ },
+ Lint {
label: "meta_variable_misuse",
description: r##"possible meta-variable misuse at macro definition"##,
},
@@ -222,16 +341,20 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects Unicode scripts whose mixed script confusables codepoints are solely used"##,
},
Lint {
+ label: "multiple_supertrait_upcastable",
+ description: r##"detect when an object-safe trait has multiple supertraits"##,
+ },
+ Lint {
label: "must_not_suspend",
description: r##"use of a `#[must_not_suspend]` value across a yield point"##,
},
Lint {
- label: "mutable_borrow_reservation_conflict",
- description: r##"reservation of a two-phased borrow conflicts with other shared borrows"##,
+ label: "mutable_transmutes",
+ description: r##"transmuting &T to &mut T is undefined behavior, even if the reference is unused"##,
},
Lint {
- label: "mutable_transmutes",
- description: r##"mutating transmuted &mut T from &T may cause undefined behavior"##,
+ label: "named_arguments_used_positionally",
+ description: r##"named arguments in format used positionally"##,
},
Lint { label: "named_asm_labels", description: r##"named labels in inline assembly"## },
Lint {
@@ -277,6 +400,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects the use of well-known noop methods"##,
},
Lint {
+ label: "opaque_hidden_inferred_bound",
+ description: r##"detects the use of nested `impl Trait` types in associated type bounds that are not general enough"##,
+ },
+ Lint {
label: "order_dependent_trait_objects",
description: r##"trait-object types were treated as different depending on marker-trait order"##,
},
@@ -295,8 +422,12 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"pointers are not structural-match"##,
},
Lint {
- label: "private_in_public",
- description: r##"detect private items in public interfaces not caught by the old implementation"##,
+ label: "private_bounds",
+ description: r##"private type in secondary interface of an item"##,
+ },
+ Lint {
+ label: "private_interfaces",
+ description: r##"private type in primary interface of an item"##,
},
Lint {
label: "proc_macro_back_compat",
@@ -315,12 +446,20 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects unnecessary trailing semicolons"##,
},
Lint {
+ label: "refining_impl_trait",
+ description: r##"impl trait in impl method signature does not match trait method signature"##,
+ },
+ Lint {
label: "renamed_and_removed_lints",
description: r##"lints that have been renamed or removed"##,
},
Lint {
+ label: "repr_transparent_external_private_fields",
+ description: r##"transparent type contains an external ZST that is marked #[non_exhaustive] or contains private fields"##,
+ },
+ Lint {
label: "rust_2018_compatibility",
- description: r##"lint group for: keyword-idents, anonymous-parameters, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate"##,
+ description: r##"lint group for: keyword-idents, anonymous-parameters, absolute-paths-not-starting-with-crate, tyvar-behind-raw-pointer"##,
},
Lint {
label: "rust_2018_idioms",
@@ -328,7 +467,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
},
Lint {
label: "rust_2021_compatibility",
- description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prelude-collisions, rust-2021-prefixes-incompatible-syntax, array-into-iter, non-fmt-panics"##,
+ description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prefixes-incompatible-syntax, rust-2021-prelude-collisions, array-into-iter, non-fmt-panics"##,
},
Lint {
label: "rust_2021_incompatible_closure_captures",
@@ -359,14 +498,30 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"a feature gate that doesn't break dependent crates"##,
},
Lint {
+ label: "special_module_name",
+ description: r##"module declarations for files with a special meaning"##,
+ },
+ Lint {
label: "stable_features",
description: r##"stable features found in `#[feature]` directive"##,
},
Lint {
+ label: "suspicious_auto_trait_impls",
+ description: r##"the rules governing auto traits have recently changed resulting in potential breakage"##,
+ },
+ Lint {
+ label: "suspicious_double_ref_op",
+ description: r##"suspicious call of trait method on `&&T`"##,
+ },
+ Lint {
label: "temporary_cstring_as_ptr",
description: r##"detects getting the inner pointer of a temporary `CString`"##,
},
Lint {
+ label: "test_unstable_lint",
+ description: r##"this unstable lint is only for testing"##,
+ },
+ Lint {
label: "text_direction_codepoint_in_comment",
description: r##"invisible directionality-changing codepoints in comment"##,
},
@@ -395,10 +550,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"raw pointer to an inference variable"##,
},
Lint {
- label: "unaligned_references",
- description: r##"detects unaligned references to fields of packed structs"##,
- },
- Lint {
label: "uncommon_codepoints",
description: r##"detects uncommon Unicode codepoints in identifiers"##,
},
@@ -410,6 +561,26 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "unconditional_recursion",
description: r##"functions that cannot return without calling themselves"##,
},
+ Lint {
+ label: "undefined_naked_function_abi",
+ description: r##"undefined naked function ABI"##,
+ },
+ Lint {
+ label: "undropped_manually_drops",
+ description: r##"calls to `std::mem::drop` with `std::mem::ManuallyDrop` instead of it's inner value"##,
+ },
+ Lint {
+ label: "unexpected_cfgs",
+ description: r##"detects unexpected names and values in `#[cfg]` conditions"##,
+ },
+ Lint {
+ label: "unfulfilled_lint_expectations",
+ description: r##"unfulfilled lint expectation"##,
+ },
+ Lint {
+ label: "ungated_async_fn_track_caller",
+ description: r##"enabling track_caller on an async fn is a no-op unless the async_fn_track_caller feature is enabled"##,
+ },
Lint { label: "uninhabited_static", description: r##"uninhabited static"## },
Lint {
label: "unknown_crate_types",
@@ -417,16 +588,27 @@ pub const DEFAULT_LINTS: &[Lint] = &[
},
Lint { label: "unknown_lints", description: r##"unrecognized lint attribute"## },
Lint {
+ label: "unknown_or_malformed_diagnostic_attributes",
+ description: r##"unrecognized or malformed diagnostic attribute"##,
+ },
+ Lint {
label: "unnameable_test_items",
description: r##"detects an item that cannot be named being marked as `#[test_case]`"##,
},
+ Lint {
+ label: "unnameable_types",
+ description: r##"effective visibility of a type is larger than the area in which it can be named"##,
+ },
Lint { label: "unreachable_code", description: r##"detects unreachable code paths"## },
Lint { label: "unreachable_patterns", description: r##"detects unreachable patterns"## },
Lint {
label: "unreachable_pub",
description: r##"`pub` items not reachable from crate root"##,
},
- Lint { label: "unsafe_code", description: r##"usage of `unsafe` code"## },
+ Lint {
+ label: "unsafe_code",
+ description: r##"usage of `unsafe` code and other potentially unsound constructs"##,
+ },
Lint {
label: "unsafe_op_in_unsafe_fn",
description: r##"unsafe operations in unsafe functions without an explicit unsafe block are deprecated"##,
@@ -440,16 +622,16 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects name collision with an existing but unstable method"##,
},
Lint {
- label: "unsupported_calling_conventions",
- description: r##"use of unsupported calling convention"##,
+ label: "unstable_syntax_pre_expansion",
+ description: r##"unstable syntax can change at any point in the future, causing a hard error!"##,
},
Lint {
- label: "unsupported_naked_functions",
- description: r##"unsupported naked function definitions"##,
+ label: "unsupported_calling_conventions",
+ description: r##"use of unsupported calling convention"##,
},
Lint {
label: "unused",
- description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons"##,
+ description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-macro-rules, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons, map-unit-fn"##,
},
Lint {
label: "unused_allocation",
@@ -460,6 +642,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detect assignments that will never be read"##,
},
Lint {
+ label: "unused_associated_type_bounds",
+ description: r##"detects unused `Foo = Bar` bounds in `dyn Trait<Foo = Bar>`"##,
+ },
+ Lint {
label: "unused_attributes",
description: r##"detects attributes that were not used by the compiler"##,
},
@@ -491,6 +677,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "unused_lifetimes",
description: r##"detects lifetime parameters that are never used"##,
},
+ Lint {
+ label: "unused_macro_rules",
+ description: r##"detects macro rules that were not used"##,
+ },
Lint { label: "unused_macros", description: r##"detects macros that were not used"## },
Lint {
label: "unused_must_use",
@@ -512,6 +702,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
label: "unused_results",
description: r##"unused result of an expression in a statement"##,
},
+ Lint {
+ label: "unused_tuple_struct_fields",
+ description: r##"detects tuple struct fields that are never read"##,
+ },
Lint { label: "unused_unsafe", description: r##"unnecessary use of an `unsafe` block"## },
Lint {
label: "unused_variables",
@@ -522,6 +716,10 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"detects deprecation attributes with no effect"##,
},
Lint {
+ label: "useless_ptr_null_checks",
+ description: r##"useless checking of non-null-typed pointer"##,
+ },
+ Lint {
label: "variant_size_differences",
description: r##"detects enums with widely varying variant sizes"##,
},
@@ -542,51 +740,65 @@ pub const DEFAULT_LINTS: &[Lint] = &[
description: r##"suggest using `loop { }` instead of `while true { }`"##,
},
];
+
pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "future_incompatible",
- description: r##"lint group for: forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, invalid-doc-attributes, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, unsupported-calling-conventions, deref-into-dyn-supertrait"##,
+ description: r##"lint group for: deref-into-dyn-supertrait, ambiguous-associated-items, ambiguous-glob-imports, byte-slice-in-packed-struct-with-derive, cenum-impl-drop-cast, coherence-leak-check, coinductive-overlap-in-coherence, conflicting-repr-hints, const-evaluatable-unchecked, const-patterns-without-partial-eq, deprecated-cfg-attr-crate-type-name, elided-lifetimes-in-associated-constant, forbidden-lint-groups, ill-formed-attribute-input, illegal-floating-point-literal-pattern, implied-bounds-entailment, indirect-structural-match, invalid-alignment, invalid-doc-attributes, invalid-type-param-default, late-bound-lifetime-arguments, legacy-derive-helpers, macro-expanded-macro-exports-accessed-by-absolute-paths, missing-fragment-specifier, nontrivial-structural-match, order-dependent-trait-objects, patterns-in-fns-without-body, pointer-structural-match, proc-macro-back-compat, proc-macro-derive-resolution-fallback, pub-use-of-private-extern-crate, repr-transparent-external-private-fields, semicolon-in-expressions-from-macros, soft-unstable, suspicious-auto-trait-impls, uninhabited-static, unstable-name-collisions, unstable-syntax-pre-expansion, unsupported-calling-conventions, where-clauses-object-safety"##,
},
children: &[
+ "deref_into_dyn_supertrait",
+ "ambiguous_associated_items",
+ "ambiguous_glob_imports",
+ "byte_slice_in_packed_struct_with_derive",
+ "cenum_impl_drop_cast",
+ "coherence_leak_check",
+ "coinductive_overlap_in_coherence",
+ "conflicting_repr_hints",
+ "const_evaluatable_unchecked",
+ "const_patterns_without_partial_eq",
+ "deprecated_cfg_attr_crate_type_name",
+ "elided_lifetimes_in_associated_constant",
"forbidden_lint_groups",
+ "ill_formed_attribute_input",
"illegal_floating_point_literal_pattern",
- "private_in_public",
- "pub_use_of_private_extern_crate",
+ "implied_bounds_entailment",
+ "indirect_structural_match",
+ "invalid_alignment",
+ "invalid_doc_attributes",
"invalid_type_param_default",
- "const_err",
- "unaligned_references",
- "patterns_in_fns_without_body",
- "missing_fragment_specifier",
"late_bound_lifetime_arguments",
- "order_dependent_trait_objects",
- "coherence_leak_check",
- "unstable_name_collisions",
- "where_clauses_object_safety",
- "proc_macro_derive_resolution_fallback",
+ "legacy_derive_helpers",
"macro_expanded_macro_exports_accessed_by_absolute_paths",
- "ill_formed_attribute_input",
- "conflicting_repr_hints",
- "ambiguous_associated_items",
- "mutable_borrow_reservation_conflict",
- "indirect_structural_match",
- "pointer_structural_match",
+ "missing_fragment_specifier",
"nontrivial_structural_match",
+ "order_dependent_trait_objects",
+ "patterns_in_fns_without_body",
+ "pointer_structural_match",
+ "proc_macro_back_compat",
+ "proc_macro_derive_resolution_fallback",
+ "pub_use_of_private_extern_crate",
+ "repr_transparent_external_private_fields",
+ "semicolon_in_expressions_from_macros",
"soft_unstable",
- "cenum_impl_drop_cast",
- "const_evaluatable_unchecked",
+ "suspicious_auto_trait_impls",
"uninhabited_static",
- "unsupported_naked_functions",
- "invalid_doc_attributes",
- "semicolon_in_expressions_from_macros",
- "legacy_derive_helpers",
- "proc_macro_back_compat",
+ "unstable_name_collisions",
+ "unstable_syntax_pre_expansion",
"unsupported_calling_conventions",
- "deref_into_dyn_supertrait",
+ "where_clauses_object_safety",
],
},
LintGroup {
lint: Lint {
+ label: "let_underscore",
+ description: r##"lint group for: let-underscore-drop, let-underscore-lock"##,
+ },
+ children: &["let_underscore_drop", "let_underscore_lock"],
+ },
+ LintGroup {
+ lint: Lint {
label: "nonstandard_style",
description: r##"lint group for: non-camel-case-types, non-snake-case, non-upper-case-globals"##,
},
@@ -595,13 +807,13 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "rust_2018_compatibility",
- description: r##"lint group for: keyword-idents, anonymous-parameters, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate"##,
+ description: r##"lint group for: keyword-idents, anonymous-parameters, absolute-paths-not-starting-with-crate, tyvar-behind-raw-pointer"##,
},
children: &[
"keyword_idents",
"anonymous_parameters",
- "tyvar_behind_raw_pointer",
"absolute_paths_not_starting_with_crate",
+ "tyvar_behind_raw_pointer",
],
},
LintGroup {
@@ -620,15 +832,15 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "rust_2021_compatibility",
- description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prelude-collisions, rust-2021-prefixes-incompatible-syntax, array-into-iter, non-fmt-panics"##,
+ description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects, rust-2021-incompatible-closure-captures, rust-2021-incompatible-or-patterns, rust-2021-prefixes-incompatible-syntax, rust-2021-prelude-collisions, array-into-iter, non-fmt-panics"##,
},
children: &[
"ellipsis_inclusive_range_patterns",
"bare_trait_objects",
"rust_2021_incompatible_closure_captures",
"rust_2021_incompatible_or_patterns",
- "rust_2021_prelude_collisions",
"rust_2021_prefixes_incompatible_syntax",
+ "rust_2021_prelude_collisions",
"array_into_iter",
"non_fmt_panics",
],
@@ -636,7 +848,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "unused",
- description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons"##,
+ description: r##"lint group for: unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-macro-rules, unused-allocation, unused-doc-comments, unused-extern-crates, unused-features, unused-labels, unused-parens, unused-braces, redundant-semicolons, map-unit-fn"##,
},
children: &[
"unused_imports",
@@ -651,6 +863,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
"path_statements",
"unused_attributes",
"unused_macros",
+ "unused_macro_rules",
"unused_allocation",
"unused_doc_comments",
"unused_extern_crates",
@@ -659,6 +872,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
"unused_parens",
"unused_braces",
"redundant_semicolons",
+ "map_unit_fn",
],
},
LintGroup {
@@ -673,7 +887,7 @@ pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &[
pub const RUSTDOC_LINTS: &[Lint] = &[
Lint {
label: "rustdoc::all",
- description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::missing-doc-code-examples, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs"##,
+ description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs, rustdoc::unescaped-backticks, rustdoc::redundant-explicit-links"##,
},
Lint { label: "rustdoc::bare_urls", description: r##"detects URLs that are not hyperlinks"## },
Lint {
@@ -708,27 +922,70 @@ pub const RUSTDOC_LINTS: &[Lint] = &[
label: "rustdoc::private_intra_doc_links",
description: r##"linking from a public item to a private one"##,
},
+ Lint {
+ label: "rustdoc::redundant_explicit_links",
+ description: r##"detects redundant explicit links in doc comments"##,
+ },
+ Lint {
+ label: "rustdoc::unescaped_backticks",
+ description: r##"detects unescaped backticks in doc comments"##,
+ },
];
+
pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &[LintGroup {
lint: Lint {
label: "rustdoc::all",
- description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::missing-doc-code-examples, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs"##,
+ description: r##"lint group for: rustdoc::broken-intra-doc-links, rustdoc::private-intra-doc-links, rustdoc::private-doc-tests, rustdoc::invalid-codeblock-attributes, rustdoc::invalid-rust-codeblocks, rustdoc::invalid-html-tags, rustdoc::bare-urls, rustdoc::missing-crate-level-docs, rustdoc::unescaped-backticks, rustdoc::redundant-explicit-links"##,
},
children: &[
"rustdoc::broken_intra_doc_links",
"rustdoc::private_intra_doc_links",
- "rustdoc::missing_doc_code_examples",
"rustdoc::private_doc_tests",
"rustdoc::invalid_codeblock_attributes",
"rustdoc::invalid_rust_codeblocks",
"rustdoc::invalid_html_tags",
"rustdoc::bare_urls",
"rustdoc::missing_crate_level_docs",
+ "rustdoc::unescaped_backticks",
+ "rustdoc::redundant_explicit_links",
],
}];
pub const FEATURES: &[Lint] = &[
Lint {
+ label: "aarch64_ver_target_feature",
+ description: r##"# `aarch64_ver_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_amdgpu_kernel",
+ description: r##"# `abi_amdgpu_kernel`
+
+The tracking issue for this feature is: [#51575]
+
+[#51575]: https://github.com/rust-lang/rust/issues/51575
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_avr_interrupt",
+ description: r##"# `abi_avr_interrupt`
+
+The tracking issue for this feature is: [#69664]
+
+[#69664]: https://github.com/rust-lang/rust/issues/69664
+
+------------------------
+"##,
+ },
+ Lint {
label: "abi_c_cmse_nonsecure_call",
description: r##"# `abi_c_cmse_nonsecure_call`
@@ -931,6 +1188,121 @@ $ cat $(find -name '*.s')
"##,
},
Lint {
+ label: "abi_riscv_interrupt",
+ description: r##"# `abi_riscv_interrupt`
+
+The tracking issue for this feature is: [#111889]
+
+[#111889]: https://github.com/rust-lang/rust/issues/111889
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_unadjusted",
+ description: r##"# `abi_unadjusted`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_vectorcall",
+ description: r##"# `abi_vectorcall`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "abi_x86_interrupt",
+ description: r##"# `abi_x86_interrupt`
+
+The tracking issue for this feature is: [#40180]
+
+[#40180]: https://github.com/rust-lang/rust/issues/40180
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "absolute_path",
+ description: r##"# `absolute_path`
+
+The tracking issue for this feature is: [#92750]
+
+[#92750]: https://github.com/rust-lang/rust/issues/92750
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "addr_parse_ascii",
+ description: r##"# `addr_parse_ascii`
+
+The tracking issue for this feature is: [#101035]
+
+[#101035]: https://github.com/rust-lang/rust/issues/101035
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "adt_const_params",
+ description: r##"# `adt_const_params`
+
+The tracking issue for this feature is: [#95174]
+
+[#95174]: https://github.com/rust-lang/rust/issues/95174
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "alloc_error_handler",
+ description: r##"# `alloc_error_handler`
+
+The tracking issue for this feature is: [#51540]
+
+[#51540]: https://github.com/rust-lang/rust/issues/51540
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "alloc_error_hook",
+ description: r##"# `alloc_error_hook`
+
+The tracking issue for this feature is: [#51245]
+
+[#51245]: https://github.com/rust-lang/rust/issues/51245
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "alloc_internals",
+ description: r##"# `alloc_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "alloc_layout_extra",
+ description: r##"# `alloc_layout_extra`
+
+The tracking issue for this feature is: [#55724]
+
+[#55724]: https://github.com/rust-lang/rust/issues/55724
+
+------------------------
+"##,
+ },
+ Lint {
label: "allocator_api",
description: r##"# `allocator_api`
@@ -961,53 +1333,171 @@ compiler.
"##,
},
Lint {
- label: "arbitrary_enum_discriminant",
- description: r##"# `arbitrary_enum_discriminant`
+ label: "allow_internal_unsafe",
+ description: r##"# `allow_internal_unsafe`
-The tracking issue for this feature is: [#60553]
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
-[#60553]: https://github.com/rust-lang/rust/issues/60553
+------------------------
+"##,
+ },
+ Lint {
+ label: "allow_internal_unstable",
+ description: r##"# `allow_internal_unstable`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
------------------------
+"##,
+ },
+ Lint {
+ label: "anonymous_lifetime_in_impl_trait",
+ description: r##"# `anonymous_lifetime_in_impl_trait`
-The `arbitrary_enum_discriminant` feature permits tuple-like and
-struct-like enum variants with `#[repr(<int-type>)]` to have explicit discriminants.
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
-## Examples
+------------------------
+"##,
+ },
+ Lint {
+ label: "arbitrary_self_types",
+ description: r##"# `arbitrary_self_types`
-```rust
-#![feature(arbitrary_enum_discriminant)]
-
-#[allow(dead_code)]
-#[repr(u8)]
-enum Enum {
- Unit = 3,
- Tuple(u16) = 2,
- Struct {
- a: u8,
- b: u16,
- } = 1,
-}
+The tracking issue for this feature is: [#44874]
-impl Enum {
- fn tag(&self) -> u8 {
- unsafe { *(self as *const Self as *const u8) }
- }
-}
+[#44874]: https://github.com/rust-lang/rust/issues/44874
-assert_eq!(3, Enum::Unit.tag());
-assert_eq!(2, Enum::Tuple(5).tag());
-assert_eq!(1, Enum::Struct{a: 7, b: 11}.tag());
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "arc_unwrap_or_clone",
+ description: r##"# `arc_unwrap_or_clone`
+
+The tracking issue for this feature is: [#93610]
+
+[#93610]: https://github.com/rust-lang/rust/issues/93610
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "arm_target_feature",
+ description: r##"# `arm_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_chunks",
+ description: r##"# `array_chunks`
+
+The tracking issue for this feature is: [#74985]
+
+[#74985]: https://github.com/rust-lang/rust/issues/74985
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_into_iter_constructors",
+ description: r##"# `array_into_iter_constructors`
+
+The tracking issue for this feature is: [#91583]
+
+[#91583]: https://github.com/rust-lang/rust/issues/91583
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_methods",
+ description: r##"# `array_methods`
+
+The tracking issue for this feature is: [#76118]
+
+[#76118]: https://github.com/rust-lang/rust/issues/76118
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_try_from_fn",
+ description: r##"# `array_try_from_fn`
+
+The tracking issue for this feature is: [#89379]
+
+[#89379]: https://github.com/rust-lang/rust/issues/89379
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_try_map",
+ description: r##"# `array_try_map`
+
+The tracking issue for this feature is: [#79711]
+
+[#79711]: https://github.com/rust-lang/rust/issues/79711
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "array_windows",
+ description: r##"# `array_windows`
+
+The tracking issue for this feature is: [#75027]
+
+[#75027]: https://github.com/rust-lang/rust/issues/75027
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "as_array_of_cells",
+ description: r##"# `as_array_of_cells`
+
+The tracking issue for this feature is: [#88248]
+
+[#88248]: https://github.com/rust-lang/rust/issues/88248
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ascii_char",
+ description: r##"# `ascii_char`
+
+The tracking issue for this feature is: [#110998]
+
+[#110998]: https://github.com/rust-lang/rust/issues/110998
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ascii_char_variants",
+ description: r##"# `ascii_char_variants`
+
+The tracking issue for this feature is: [#110998]
+
+[#110998]: https://github.com/rust-lang/rust/issues/110998
+
+------------------------
"##,
},
Lint {
label: "asm_const",
description: r##"# `asm_const`
-The tracking issue for this feature is: [#72016]
+The tracking issue for this feature is: [#93332]
-[#72016]: https://github.com/rust-lang/rust/issues/72016
+[#93332]: https://github.com/rust-lang/rust/issues/93332
------------------------
@@ -1020,9 +1510,9 @@ This feature adds a `const <expr>` operand type to `asm!` and `global_asm!`.
label: "asm_experimental_arch",
description: r##"# `asm_experimental_arch`
-The tracking issue for this feature is: [#72016]
+The tracking issue for this feature is: [#93335]
-[#72016]: https://github.com/rust-lang/rust/issues/72016
+[#93335]: https://github.com/rust-lang/rust/issues/93335
------------------------
@@ -1035,6 +1525,10 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
- BPF
- SPIR-V
- AVR
+- MSP430
+- M68k
+- CSKY
+- s390x
## Register classes
@@ -1059,6 +1553,14 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| AVR | `reg_pair` | `r3r2` .. `r25r24`, `X`, `Z` | `r` |
| AVR | `reg_iw` | `r25r24`, `X`, `Z` | `w` |
| AVR | `reg_ptr` | `X`, `Z` | `e` |
+| MSP430 | `reg` | `r[0-15]` | `r` |
+| M68k | `reg` | `d[0-7]`, `a[0-7]` | `r` |
+| M68k | `reg_data` | `d[0-7]` | `d` |
+| M68k | `reg_addr` | `a[0-3]` | `a` |
+| CSKY | `reg` | `r[0-31]` | `r` |
+| CSKY | `freg` | `f[0-31]` | `f` |
+| s390x | `reg` | `r[0-10]`, `r[12-14]` | `r` |
+| s390x | `freg` | `f[0-15]` | `f` |
> **Notes**:
> - NVPTX doesn't have a fixed register set, so named registers are not supported.
@@ -1087,6 +1589,13 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| BPF | `wreg` | `alu32` | `i8` `i16` `i32` |
| AVR | `reg`, `reg_upper` | None | `i8` |
| AVR | `reg_pair`, `reg_iw`, `reg_ptr` | None | `i16` |
+| MSP430 | `reg` | None | `i8`, `i16` |
+| M68k | `reg`, `reg_addr` | None | `i16`, `i32` |
+| M68k | `reg_data` | None | `i8`, `i16`, `i32` |
+| CSKY | `reg` | None | `i8`, `i16`, `i32` |
+| CSKY | `freg` | None | `f32`, |
+| s390x | `reg` | None | `i8`, `i16`, `i32`, `i64` |
+| s390x | `freg` | None | `f32`, `f64` |
## Register aliases
@@ -1100,13 +1609,36 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| AVR | `XL` | `r26` |
| AVR | `ZH` | `r31` |
| AVR | `ZL` | `r30` |
+| MSP430 | `r0` | `pc` |
+| MSP430 | `r1` | `sp` |
+| MSP430 | `r2` | `sr` |
+| MSP430 | `r3` | `cg` |
+| MSP430 | `r4` | `fp` |
+| M68k | `a5` | `bp` |
+| M68k | `a6` | `fp` |
+| M68k | `a7` | `sp`, `usp`, `ssp`, `isp` |
+| CSKY | `r[0-3]` | `a[0-3]` |
+| CSKY | `r[4-11]` | `l[0-7]` |
+| CSKY | `r[12-13]` | `t[0-1]` |
+| CSKY | `r14` | `sp` |
+| CSKY | `r15` | `lr` |
+| CSKY | `r[16-17]` | `l[8-9]` |
+| CSKY | `r[18-25]` | `t[2-9]` |
+| CSKY | `r28` | `rgb` |
+| CSKY | `r29` | `rtb` |
+| CSKY | `r30` | `svbr` |
+| CSKY | `r31` | `tls` |
+
+> **Notes**:
+> - TI does not mandate a frame pointer for MSP430, but toolchains are allowed
+ to use one; LLVM uses `r4`.
## Unsupported registers
| Architecture | Unsupported register | Reason |
| ------------ | --------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| All | `sp` | The stack pointer must be restored to its original value at the end of an asm code block. |
-| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR) | The frame pointer cannot be used as an input or output. |
+| All | `sp`, `r15` (s390x) | The stack pointer must be restored to its original value at the end of an asm code block. |
+| All | `fr` (Hexagon), `$fp` (MIPS), `Y` (AVR), `r4` (MSP430), `a6` (M68k), `r11` (s390x) | The frame pointer cannot be used as an input or output. |
| All | `r19` (Hexagon) | This is used internally by LLVM as a "base pointer" for functions with complex stack frames. |
| MIPS | `$0` or `$zero` | This is a constant zero register which can't be modified. |
| MIPS | `$1` or `$at` | Reserved for assembler. |
@@ -1115,6 +1647,15 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| MIPS | `$ra` | Return address cannot be used as inputs or outputs. |
| Hexagon | `lr` | This is the link register which cannot be used as an input or output. |
| AVR | `r0`, `r1`, `r1r0` | Due to an issue in LLVM, the `r0` and `r1` registers cannot be used as inputs or outputs. If modified, they must be restored to their original values before the end of the block. |
+|MSP430 | `r0`, `r2`, `r3` | These are the program counter, status register, and constant generator respectively. Neither the status register nor constant generator can be written to. |
+| M68k | `a4`, `a5` | Used internally by LLVM for the base pointer and global base pointer. |
+| CSKY | `r7`, `r28` | Used internally by LLVM for the base pointer and global base pointer. |
+| CSKY | `r8` | Used internally by LLVM for the frame pointer. |
+| CSKY | `r14` | Used internally by LLVM for the stack pointer. |
+| CSKY | `r15` | This is the link register. |
+| CSKY | `r[26-30]` | Reserved by its ABI. |
+| CSKY | `r31` | This is the TLS register. |
+
## Template modifiers
@@ -1129,42 +1670,167 @@ This feature tracks `asm!` and `global_asm!` support for the following architect
| PowerPC | `reg` | None | `0` | None |
| PowerPC | `reg_nonzero` | None | `3` | `b` |
| PowerPC | `freg` | None | `0` | None |
+| s390x | `reg` | None | `%r0` | None |
+| s390x | `freg` | None | `%f0` | None |
+| CSKY | `reg` | None | `r0` | None |
+| CSKY | `freg` | None | `f0` | None |
# Flags covered by `preserves_flags`
These flags registers must be restored upon exiting the asm block if the `preserves_flags` option is set:
- AVR
- The status register `SREG`.
+- MSP430
+ - The status register `r2`.
+- M68k
+ - The condition code register `ccr`.
+- s390x
+ - The condition code register `cc`.
"##,
},
Lint {
- label: "asm_sym",
- description: r##"# `asm_sym`
+ label: "asm_unwind",
+ description: r##"# `asm_unwind`
-The tracking issue for this feature is: [#72016]
+The tracking issue for this feature is: [#93334]
-[#72016]: https://github.com/rust-lang/rust/issues/72016
+[#93334]: https://github.com/rust-lang/rust/issues/93334
------------------------
-This feature adds a `sym <path>` operand type to `asm!` and `global_asm!`.
-- `<path>` must refer to a `fn` or `static`.
-- A mangled symbol name referring to the item is substituted into the asm template string.
-- The substituted string does not include any modifiers (e.g. GOT, PLT, relocations, etc).
-- `<path>` is allowed to point to a `#[thread_local]` static, in which case the asm code can combine the symbol with relocations (e.g. `@plt`, `@TPOFF`) to read from thread-local data.
+This feature adds a `may_unwind` option to `asm!` which allows an `asm` block to unwind stack and be part of the stack unwinding process. This option is only supported by the LLVM backend right now.
"##,
},
Lint {
- label: "asm_unwind",
- description: r##"# `asm_unwind`
+ label: "assert_matches",
+ description: r##"# `assert_matches`
-The tracking issue for this feature is: [#72016]
+The tracking issue for this feature is: [#82775]
-[#72016]: https://github.com/rust-lang/rust/issues/72016
+[#82775]: https://github.com/rust-lang/rust/issues/82775
------------------------
+"##,
+ },
+ Lint {
+ label: "associated_const_equality",
+ description: r##"# `associated_const_equality`
-This feature adds a `may_unwind` option to `asm!` which allows an `asm` block to unwind stack and be part of the stack unwinding process. This option is only supported by the LLVM backend right now.
+The tracking issue for this feature is: [#92827]
+
+[#92827]: https://github.com/rust-lang/rust/issues/92827
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "associated_type_bounds",
+ description: r##"# `associated_type_bounds`
+
+The tracking issue for this feature is: [#52662]
+
+[#52662]: https://github.com/rust-lang/rust/issues/52662
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "associated_type_defaults",
+ description: r##"# `associated_type_defaults`
+
+The tracking issue for this feature is: [#29661]
+
+[#29661]: https://github.com/rust-lang/rust/issues/29661
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_closure",
+ description: r##"# `async_closure`
+
+The tracking issue for this feature is: [#62290]
+
+[#62290]: https://github.com/rust-lang/rust/issues/62290
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_fn_in_trait",
+ description: r##"# `async_fn_in_trait`
+
+The tracking issue for this feature is: [#91611]
+
+[#91611]: https://github.com/rust-lang/rust/issues/91611
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_fn_track_caller",
+ description: r##"# `async_fn_track_caller`
+
+The tracking issue for this feature is: [#110011]
+
+[#110011]: https://github.com/rust-lang/rust/issues/110011
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_iter_from_iter",
+ description: r##"# `async_iter_from_iter`
+
+The tracking issue for this feature is: [#81798]
+
+[#81798]: https://github.com/rust-lang/rust/issues/81798
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "async_iterator",
+ description: r##"# `async_iterator`
+
+The tracking issue for this feature is: [#79024]
+
+[#79024]: https://github.com/rust-lang/rust/issues/79024
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "atomic_bool_fetch_not",
+ description: r##"# `atomic_bool_fetch_not`
+
+The tracking issue for this feature is: [#98485]
+
+[#98485]: https://github.com/rust-lang/rust/issues/98485
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "atomic_from_mut",
+ description: r##"# `atomic_from_mut`
+
+The tracking issue for this feature is: [#76314]
+
+[#76314]: https://github.com/rust-lang/rust/issues/76314
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "atomic_from_ptr",
+ description: r##"# `atomic_from_ptr`
+
+The tracking issue for this feature is: [#108652]
+
+[#108652]: https://github.com/rust-lang/rust/issues/108652
+
+------------------------
"##,
},
Lint {
@@ -1184,8 +1850,8 @@ that are automatically implemented for every type, unless the type, or a type it
has explicitly opted out via a negative impl. (Negative impls are separately controlled
by the `negative_impls` feature.)
-[`Send`]: https://doc.rust-lang.org/std/marker/trait.Send.html
-[`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
+[`Send`]: ../../std/marker/trait.Send.html
+[`Sync`]: ../../std/marker/trait.Sync.html
```rust,ignore (partial-example)
impl !Trait for Type {}
@@ -1278,6 +1944,116 @@ Auto traits cannot have supertraits. This is for soundness reasons, as the inter
"##,
},
Lint {
+ label: "avx512_target_feature",
+ description: r##"# `avx512_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "backtrace_frames",
+ description: r##"# `backtrace_frames`
+
+The tracking issue for this feature is: [#79676]
+
+[#79676]: https://github.com/rust-lang/rust/issues/79676
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "bigint_helper_methods",
+ description: r##"# `bigint_helper_methods`
+
+The tracking issue for this feature is: [#85532]
+
+[#85532]: https://github.com/rust-lang/rust/issues/85532
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "binary_heap_as_slice",
+ description: r##"# `binary_heap_as_slice`
+
+The tracking issue for this feature is: [#83659]
+
+[#83659]: https://github.com/rust-lang/rust/issues/83659
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "binary_heap_drain_sorted",
+ description: r##"# `binary_heap_drain_sorted`
+
+The tracking issue for this feature is: [#59278]
+
+[#59278]: https://github.com/rust-lang/rust/issues/59278
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "binary_heap_into_iter_sorted",
+ description: r##"# `binary_heap_into_iter_sorted`
+
+The tracking issue for this feature is: [#59278]
+
+[#59278]: https://github.com/rust-lang/rust/issues/59278
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "bound_as_ref",
+ description: r##"# `bound_as_ref`
+
+The tracking issue for this feature is: [#80996]
+
+[#80996]: https://github.com/rust-lang/rust/issues/80996
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "bound_map",
+ description: r##"# `bound_map`
+
+The tracking issue for this feature is: [#86026]
+
+[#86026]: https://github.com/rust-lang/rust/issues/86026
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "box_into_boxed_slice",
+ description: r##"# `box_into_boxed_slice`
+
+The tracking issue for this feature is: [#71582]
+
+[#71582]: https://github.com/rust-lang/rust/issues/71582
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "box_into_inner",
+ description: r##"# `box_into_inner`
+
+The tracking issue for this feature is: [#80437]
+
+[#80437]: https://github.com/rust-lang/rust/issues/80437
+
+------------------------
+"##,
+ },
+ Lint {
label: "box_patterns",
description: r##"# `box_patterns`
@@ -1285,8 +2061,6 @@ The tracking issue for this feature is: [#29641]
[#29641]: https://github.com/rust-lang/rust/issues/29641
-See also [`box_syntax`](box-syntax.md)
-
------------------------
Box patterns let you match on `Box<T>`s:
@@ -1299,10 +2073,10 @@ fn main() {
let b = Some(Box::new(5));
match b {
Some(box n) if n < 0 => {
- println!("Box contains negative number {}", n);
+ println!("Box contains negative number {n}");
},
Some(box n) if n >= 0 => {
- println!("Box contains non-negative number {}", n);
+ println!("Box contains non-negative number {n}");
},
None => {
println!("No box");
@@ -1314,29 +2088,102 @@ fn main() {
"##,
},
Lint {
- label: "box_syntax",
- description: r##"# `box_syntax`
+ label: "bpf_target_feature",
+ description: r##"# `bpf_target_feature`
-The tracking issue for this feature is: [#49733]
+The tracking issue for this feature is: [#44839]
-[#49733]: https://github.com/rust-lang/rust/issues/49733
+[#44839]: https://github.com/rust-lang/rust/issues/44839
-See also [`box_patterns`](box-patterns.md)
+------------------------
+"##,
+ },
+ Lint {
+ label: "btree_cursors",
+ description: r##"# `btree_cursors`
+
+The tracking issue for this feature is: [#107540]
+
+[#107540]: https://github.com/rust-lang/rust/issues/107540
------------------------
+"##,
+ },
+ Lint {
+ label: "btree_extract_if",
+ description: r##"# `btree_extract_if`
-Currently the only stable way to create a `Box` is via the `Box::new` method.
-Also it is not possible in stable Rust to destructure a `Box` in a match
-pattern. The unstable `box` keyword can be used to create a `Box`. An example
-usage would be:
+The tracking issue for this feature is: [#70530]
-```rust
-#![feature(box_syntax)]
+[#70530]: https://github.com/rust-lang/rust/issues/70530
-fn main() {
- let b = box 5;
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "btreemap_alloc",
+ description: r##"# `btreemap_alloc`
+
+The tracking issue for this feature is: [#32838]
+
+[#32838]: https://github.com/rust-lang/rust/issues/32838
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "buf_read_has_data_left",
+ description: r##"# `buf_read_has_data_left`
+
+The tracking issue for this feature is: [#86423]
+
+[#86423]: https://github.com/rust-lang/rust/issues/86423
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "builtin_syntax",
+ description: r##"# `builtin_syntax`
+
+The tracking issue for this feature is: [#110680]
+
+[#110680]: https://github.com/rust-lang/rust/issues/110680
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "byte_slice_trim_ascii",
+ description: r##"# `byte_slice_trim_ascii`
+
+The tracking issue for this feature is: [#94035]
+
+[#94035]: https://github.com/rust-lang/rust/issues/94035
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "c_size_t",
+ description: r##"# `c_size_t`
+
+The tracking issue for this feature is: [#88345]
+
+[#88345]: https://github.com/rust-lang/rust/issues/88345
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "c_str_literals",
+ description: r##"# `c_str_literals`
+
+The tracking issue for this feature is: [#105723]
+
+[#105723]: https://github.com/rust-lang/rust/issues/105723
+
+------------------------
"##,
},
Lint {
@@ -1349,9 +2196,20 @@ The tracking issue for this feature is: [#74990]
------------------------
-Introduces four new ABI strings: "C-unwind", "stdcall-unwind",
-"thiscall-unwind", and "system-unwind". These enable unwinding from other
-languages (such as C++) into Rust frames and from Rust into other languages.
+Introduces new ABI strings:
+- "C-unwind"
+- "cdecl-unwind"
+- "stdcall-unwind"
+- "fastcall-unwind"
+- "vectorcall-unwind"
+- "thiscall-unwind"
+- "aapcs-unwind"
+- "win64-unwind"
+- "sysv64-unwind"
+- "system-unwind"
+
+These enable unwinding from other languages (such as C++) into Rust frames and
+from Rust into other languages.
See [RFC 2945] for more information.
@@ -1369,7 +2227,7 @@ The tracking issue for this feature is: [#44930]
------------------------
The `c_variadic` language feature enables C-variadic functions to be
-defined in Rust. The may be called both from within Rust and via FFI.
+defined in Rust. They may be called both from within Rust and via FFI.
## Examples
@@ -1426,45 +2284,91 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "cfg_panic",
- description: r##"# `cfg_panic`
+ label: "can_vector",
+ description: r##"# `can_vector`
-The tracking issue for this feature is: [#77443]
+The tracking issue for this feature is: [#69941]
-[#77443]: https://github.com/rust-lang/rust/issues/77443
+[#69941]: https://github.com/rust-lang/rust/issues/69941
------------------------
+"##,
+ },
+ Lint {
+ label: "cell_leak",
+ description: r##"# `cell_leak`
-The `cfg_panic` feature makes it possible to execute different code
-depending on the panic strategy.
+The tracking issue for this feature is: [#69099]
-Possible values at the moment are `"unwind"` or `"abort"`, although
-it is possible that new panic strategies may be added to Rust in the
-future.
+[#69099]: https://github.com/rust-lang/rust/issues/69099
-## Examples
+------------------------
+"##,
+ },
+ Lint {
+ label: "cell_update",
+ description: r##"# `cell_update`
-```rust
-#![feature(cfg_panic)]
+The tracking issue for this feature is: [#50186]
-#[cfg(panic = "unwind")]
-fn a() {
- // ...
-}
+[#50186]: https://github.com/rust-lang/rust/issues/50186
-#[cfg(not(panic = "unwind"))]
-fn a() {
- // ...
-}
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_accessible",
+ description: r##"# `cfg_accessible`
-fn b() {
- if cfg!(panic = "abort") {
- // ...
- } else {
- // ...
- }
-}
-```
+The tracking issue for this feature is: [#64797]
+
+[#64797]: https://github.com/rust-lang/rust/issues/64797
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_eval",
+ description: r##"# `cfg_eval`
+
+The tracking issue for this feature is: [#82679]
+
+[#82679]: https://github.com/rust-lang/rust/issues/82679
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_match",
+ description: r##"# `cfg_match`
+
+The tracking issue for this feature is: [#115585]
+
+[#115585]: https://github.com/rust-lang/rust/issues/115585
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_overflow_checks",
+ description: r##"# `cfg_overflow_checks`
+
+The tracking issue for this feature is: [#111466]
+
+[#111466]: https://github.com/rust-lang/rust/issues/111466
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_relocation_model",
+ description: r##"# `cfg_relocation_model`
+
+The tracking issue for this feature is: [#114929]
+
+[#114929]: https://github.com/rust-lang/rust/issues/114929
+
+------------------------
"##,
},
Lint {
@@ -1506,6 +2410,61 @@ fn b() {
"##,
},
Lint {
+ label: "cfg_target_abi",
+ description: r##"# `cfg_target_abi`
+
+The tracking issue for this feature is: [#80970]
+
+[#80970]: https://github.com/rust-lang/rust/issues/80970
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_target_compact",
+ description: r##"# `cfg_target_compact`
+
+The tracking issue for this feature is: [#96901]
+
+[#96901]: https://github.com/rust-lang/rust/issues/96901
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_target_has_atomic",
+ description: r##"# `cfg_target_has_atomic`
+
+The tracking issue for this feature is: [#94039]
+
+[#94039]: https://github.com/rust-lang/rust/issues/94039
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_target_has_atomic_equal_alignment",
+ description: r##"# `cfg_target_has_atomic_equal_alignment`
+
+The tracking issue for this feature is: [#93822]
+
+[#93822]: https://github.com/rust-lang/rust/issues/93822
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cfg_target_thread_local",
+ description: r##"# `cfg_target_thread_local`
+
+The tracking issue for this feature is: [#29594]
+
+[#29594]: https://github.com/rust-lang/rust/issues/29594
+
+------------------------
+"##,
+ },
+ Lint {
label: "cfg_version",
description: r##"# `cfg_version`
@@ -1545,10 +2504,72 @@ fn b() {
"##,
},
Lint {
- label: "char_error_internals",
- description: r##"# `char_error_internals`
+ label: "cfi_encoding",
+ description: r##"# `cfi_encoding`
-This feature is internal to the Rust compiler and is not intended for general use.
+The tracking issue for this feature is: [#89653]
+
+[#89653]: https://github.com/rust-lang/rust/issues/89653
+
+------------------------
+
+The `cfi_encoding` feature allows the user to define a CFI encoding for a type.
+It allows the user to use a different names for types that otherwise would be
+required to have the same name as used in externally defined C functions.
+
+## Examples
+
+```rust
+#![feature(cfi_encoding, extern_types)]
+
+#[cfi_encoding = "3Foo"]
+pub struct Type1(i32);
+
+extern {
+ #[cfi_encoding = "3Bar"]
+ type Type2;
+}
+```
+"##,
+ },
+ Lint {
+ label: "char_indices_offset",
+ description: r##"# `char_indices_offset`
+
+The tracking issue for this feature is: [#83871]
+
+[#83871]: https://github.com/rust-lang/rust/issues/83871
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "char_internals",
+ description: r##"# `char_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "char_min",
+ description: r##"# `char_min`
+
+The tracking issue for this feature is: [#114298]
+
+[#114298]: https://github.com/rust-lang/rust/issues/114298
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "closure_lifetime_binder",
+ description: r##"# `closure_lifetime_binder`
+
+The tracking issue for this feature is: [#97362]
+
+[#97362]: https://github.com/rust-lang/rust/issues/97362
------------------------
"##,
@@ -1570,6 +2591,17 @@ available through `std::panic::Location::caller()`, just like using
"##,
},
Lint {
+ label: "cmp_minmax",
+ description: r##"# `cmp_minmax`
+
+The tracking issue for this feature is: [#115939]
+
+[#115939]: https://github.com/rust-lang/rust/issues/115939
+
+------------------------
+"##,
+ },
+ Lint {
label: "cmse_nonsecure_entry",
description: r##"# `cmse_nonsecure_entry`
@@ -1655,6 +2687,28 @@ $ arm-none-eabi-objdump -D function.o
"##,
},
Lint {
+ label: "coerce_unsized",
+ description: r##"# `coerce_unsized`
+
+The tracking issue for this feature is: [#18598]
+
+[#18598]: https://github.com/rust-lang/rust/issues/18598
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "collapse_debuginfo",
+ description: r##"# `collapse_debuginfo`
+
+The tracking issue for this feature is: [#100758]
+
+[#100758]: https://github.com/rust-lang/rust/issues/100758
+
+------------------------
+"##,
+ },
+ Lint {
label: "compiler_builtins",
description: r##"# `compiler_builtins`
@@ -1664,6 +2718,17 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "concat_bytes",
+ description: r##"# `concat_bytes`
+
+The tracking issue for this feature is: [#87555]
+
+[#87555]: https://github.com/rust-lang/rust/issues/87555
+
+------------------------
+"##,
+ },
+ Lint {
label: "concat_idents",
description: r##"# `concat_idents`
@@ -1690,14 +2755,1027 @@ fn main() {
"##,
},
Lint {
- label: "const_eval_limit",
- description: r##"# `const_eval_limit`
+ label: "const_align_of_val",
+ description: r##"# `const_align_of_val`
+
+The tracking issue for this feature is: [#46571]
+
+[#46571]: https://github.com/rust-lang/rust/issues/46571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_align_of_val_raw",
+ description: r##"# `const_align_of_val_raw`
+
+The tracking issue for this feature is: [#46571]
+
+[#46571]: https://github.com/rust-lang/rust/issues/46571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_align_offset",
+ description: r##"# `const_align_offset`
+
+The tracking issue for this feature is: [#90962]
+
+[#90962]: https://github.com/rust-lang/rust/issues/90962
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_alloc_error",
+ description: r##"# `const_alloc_error`
+
+The tracking issue for this feature is: [#92523]
+
+[#92523]: https://github.com/rust-lang/rust/issues/92523
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_alloc_layout",
+ description: r##"# `const_alloc_layout`
+
+The tracking issue for this feature is: [#67521]
+
+[#67521]: https://github.com/rust-lang/rust/issues/67521
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_arguments_as_str",
+ description: r##"# `const_arguments_as_str`
+
+The tracking issue for this feature is: [#103900]
+
+[#103900]: https://github.com/rust-lang/rust/issues/103900
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_array_from_ref",
+ description: r##"# `const_array_from_ref`
+
+The tracking issue for this feature is: [#90206]
+
+[#90206]: https://github.com/rust-lang/rust/issues/90206
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_array_into_iter_constructors",
+ description: r##"# `const_array_into_iter_constructors`
+
+The tracking issue for this feature is: [#91583]
+
+[#91583]: https://github.com/rust-lang/rust/issues/91583
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_assert_type2",
+ description: r##"# `const_assert_type2`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_assume",
+ description: r##"# `const_assume`
+
+The tracking issue for this feature is: [#76972]
+
+[#76972]: https://github.com/rust-lang/rust/issues/76972
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_async_blocks",
+ description: r##"# `const_async_blocks`
+
+The tracking issue for this feature is: [#85368]
+
+[#85368]: https://github.com/rust-lang/rust/issues/85368
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_bigint_helper_methods",
+ description: r##"# `const_bigint_helper_methods`
+
+The tracking issue for this feature is: [#85532]
+
+[#85532]: https://github.com/rust-lang/rust/issues/85532
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_black_box",
+ description: r##"# `const_black_box`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_box",
+ description: r##"# `const_box`
+
+The tracking issue for this feature is: [#92521]
+
+[#92521]: https://github.com/rust-lang/rust/issues/92521
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_btree_len",
+ description: r##"# `const_btree_len`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_caller_location",
+ description: r##"# `const_caller_location`
+
+The tracking issue for this feature is: [#76156]
+
+[#76156]: https://github.com/rust-lang/rust/issues/76156
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_cell_into_inner",
+ description: r##"# `const_cell_into_inner`
+
+The tracking issue for this feature is: [#78729]
+
+[#78729]: https://github.com/rust-lang/rust/issues/78729
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_char_from_u32_unchecked",
+ description: r##"# `const_char_from_u32_unchecked`
+
+The tracking issue for this feature is: [#89259]
+
+[#89259]: https://github.com/rust-lang/rust/issues/89259
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_closures",
+ description: r##"# `const_closures`
+
+The tracking issue for this feature is: [#106003]
+
+[#106003]: https://github.com/rust-lang/rust/issues/106003
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_collections_with_hasher",
+ description: r##"# `const_collections_with_hasher`
+
+The tracking issue for this feature is: [#102575]
+
+[#102575]: https://github.com/rust-lang/rust/issues/102575
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_cow_is_borrowed",
+ description: r##"# `const_cow_is_borrowed`
+
+The tracking issue for this feature is: [#65143]
+
+[#65143]: https://github.com/rust-lang/rust/issues/65143
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_cstr_from_ptr",
+ description: r##"# `const_cstr_from_ptr`
+
+The tracking issue for this feature is: [#113219]
+
+[#113219]: https://github.com/rust-lang/rust/issues/113219
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_discriminant",
+ description: r##"# `const_discriminant`
+
+The tracking issue for this feature is: [#69821]
+
+[#69821]: https://github.com/rust-lang/rust/issues/69821
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_eval_select",
+ description: r##"# `const_eval_select`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_exact_div",
+ description: r##"# `const_exact_div`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_extern_fn",
+ description: r##"# `const_extern_fn`
+
+The tracking issue for this feature is: [#64926]
+
+[#64926]: https://github.com/rust-lang/rust/issues/64926
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_float_bits_conv",
+ description: r##"# `const_float_bits_conv`
+
+The tracking issue for this feature is: [#72447]
+
+[#72447]: https://github.com/rust-lang/rust/issues/72447
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_float_classify",
+ description: r##"# `const_float_classify`
+
+The tracking issue for this feature is: [#72505]
+
+[#72505]: https://github.com/rust-lang/rust/issues/72505
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_fmt_arguments_new",
+ description: r##"# `const_fmt_arguments_new`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_fn_floating_point_arithmetic",
+ description: r##"# `const_fn_floating_point_arithmetic`
+
+The tracking issue for this feature is: [#57241]
+
+[#57241]: https://github.com/rust-lang/rust/issues/57241
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_for",
+ description: r##"# `const_for`
+
+The tracking issue for this feature is: [#87575]
+
+[#87575]: https://github.com/rust-lang/rust/issues/87575
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_format_args",
+ description: r##"# `const_format_args`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_hash",
+ description: r##"# `const_hash`
+
+The tracking issue for this feature is: [#104061]
+
+[#104061]: https://github.com/rust-lang/rust/issues/104061
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_heap",
+ description: r##"# `const_heap`
+
+The tracking issue for this feature is: [#79597]
+
+[#79597]: https://github.com/rust-lang/rust/issues/79597
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_index_range_slice_index",
+ description: r##"# `const_index_range_slice_index`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_inherent_unchecked_arith",
+ description: r##"# `const_inherent_unchecked_arith`
+
+The tracking issue for this feature is: [#85122]
+
+[#85122]: https://github.com/rust-lang/rust/issues/85122
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_int_unchecked_arith",
+ description: r##"# `const_int_unchecked_arith`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_intoiterator_identity",
+ description: r##"# `const_intoiterator_identity`
+
+The tracking issue for this feature is: [#90603]
+
+[#90603]: https://github.com/rust-lang/rust/issues/90603
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_intrinsic_compare_bytes",
+ description: r##"# `const_intrinsic_compare_bytes`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_intrinsic_forget",
+ description: r##"# `const_intrinsic_forget`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_intrinsic_raw_eq",
+ description: r##"# `const_intrinsic_raw_eq`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_io_structs",
+ description: r##"# `const_io_structs`
+
+The tracking issue for this feature is: [#78812]
+
+[#78812]: https://github.com/rust-lang/rust/issues/78812
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ip",
+ description: r##"# `const_ip`
+
+The tracking issue for this feature is: [#76205]
+
+[#76205]: https://github.com/rust-lang/rust/issues/76205
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ipv4",
+ description: r##"# `const_ipv4`
+
+The tracking issue for this feature is: [#76205]
+
+[#76205]: https://github.com/rust-lang/rust/issues/76205
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ipv6",
+ description: r##"# `const_ipv6`
+
+The tracking issue for this feature is: [#76205]
+
+[#76205]: https://github.com/rust-lang/rust/issues/76205
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_likely",
+ description: r##"# `const_likely`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_location_fields",
+ description: r##"# `const_location_fields`
+
+The tracking issue for this feature is: [#102911]
+
+[#102911]: https://github.com/rust-lang/rust/issues/102911
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_array_assume_init",
+ description: r##"# `const_maybe_uninit_array_assume_init`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_as_mut_ptr",
+ description: r##"# `const_maybe_uninit_as_mut_ptr`
+
+The tracking issue for this feature is: [#75251]
+
+[#75251]: https://github.com/rust-lang/rust/issues/75251
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_assume_init",
+ description: r##"# `const_maybe_uninit_assume_init`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_assume_init_read",
+ description: r##"# `const_maybe_uninit_assume_init_read`
+
+The tracking issue for this feature is: [#63567]
+
+[#63567]: https://github.com/rust-lang/rust/issues/63567
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_uninit_array",
+ description: r##"# `const_maybe_uninit_uninit_array`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_write",
+ description: r##"# `const_maybe_uninit_write`
+
+The tracking issue for this feature is: [#63567]
+
+[#63567]: https://github.com/rust-lang/rust/issues/63567
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_maybe_uninit_zeroed",
+ description: r##"# `const_maybe_uninit_zeroed`
+
+The tracking issue for this feature is: [#91850]
+
+[#91850]: https://github.com/rust-lang/rust/issues/91850
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_mut_refs",
+ description: r##"# `const_mut_refs`
-The tracking issue for this feature is: [#67217]
+The tracking issue for this feature is: [#57349]
-[#67217]: https://github.com/rust-lang/rust/issues/67217
+[#57349]: https://github.com/rust-lang/rust/issues/57349
-The `const_eval_limit` allows someone to limit the evaluation steps the CTFE undertakes to evaluate a `const fn`.
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_nonnull_new",
+ description: r##"# `const_nonnull_new`
+
+The tracking issue for this feature is: [#93235]
+
+[#93235]: https://github.com/rust-lang/rust/issues/93235
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_num_midpoint",
+ description: r##"# `const_num_midpoint`
+
+The tracking issue for this feature is: [#110840]
+
+[#110840]: https://github.com/rust-lang/rust/issues/110840
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_option",
+ description: r##"# `const_option`
+
+The tracking issue for this feature is: [#67441]
+
+[#67441]: https://github.com/rust-lang/rust/issues/67441
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_option_ext",
+ description: r##"# `const_option_ext`
+
+The tracking issue for this feature is: [#91930]
+
+[#91930]: https://github.com/rust-lang/rust/issues/91930
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_pin",
+ description: r##"# `const_pin`
+
+The tracking issue for this feature is: [#76654]
+
+[#76654]: https://github.com/rust-lang/rust/issues/76654
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_pointer_byte_offsets",
+ description: r##"# `const_pointer_byte_offsets`
+
+The tracking issue for this feature is: [#96283]
+
+[#96283]: https://github.com/rust-lang/rust/issues/96283
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_pointer_is_aligned",
+ description: r##"# `const_pointer_is_aligned`
+
+The tracking issue for this feature is: [#104203]
+
+[#104203]: https://github.com/rust-lang/rust/issues/104203
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_precise_live_drops",
+ description: r##"# `const_precise_live_drops`
+
+The tracking issue for this feature is: [#73255]
+
+[#73255]: https://github.com/rust-lang/rust/issues/73255
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_pref_align_of",
+ description: r##"# `const_pref_align_of`
+
+The tracking issue for this feature is: [#91971]
+
+[#91971]: https://github.com/rust-lang/rust/issues/91971
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ptr_as_ref",
+ description: r##"# `const_ptr_as_ref`
+
+The tracking issue for this feature is: [#91822]
+
+[#91822]: https://github.com/rust-lang/rust/issues/91822
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ptr_is_null",
+ description: r##"# `const_ptr_is_null`
+
+The tracking issue for this feature is: [#74939]
+
+[#74939]: https://github.com/rust-lang/rust/issues/74939
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ptr_sub_ptr",
+ description: r##"# `const_ptr_sub_ptr`
+
+The tracking issue for this feature is: [#95892]
+
+[#95892]: https://github.com/rust-lang/rust/issues/95892
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_ptr_write",
+ description: r##"# `const_ptr_write`
+
+The tracking issue for this feature is: [#86302]
+
+[#86302]: https://github.com/rust-lang/rust/issues/86302
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_range_bounds",
+ description: r##"# `const_range_bounds`
+
+The tracking issue for this feature is: [#108082]
+
+[#108082]: https://github.com/rust-lang/rust/issues/108082
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_raw_ptr_comparison",
+ description: r##"# `const_raw_ptr_comparison`
+
+The tracking issue for this feature is: [#53020]
+
+[#53020]: https://github.com/rust-lang/rust/issues/53020
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_refs_to_cell",
+ description: r##"# `const_refs_to_cell`
+
+The tracking issue for this feature is: [#80384]
+
+[#80384]: https://github.com/rust-lang/rust/issues/80384
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_replace",
+ description: r##"# `const_replace`
+
+The tracking issue for this feature is: [#83164]
+
+[#83164]: https://github.com/rust-lang/rust/issues/83164
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_result",
+ description: r##"# `const_result`
+
+The tracking issue for this feature is: [#82814]
+
+[#82814]: https://github.com/rust-lang/rust/issues/82814
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_size_of_val",
+ description: r##"# `const_size_of_val`
+
+The tracking issue for this feature is: [#46571]
+
+[#46571]: https://github.com/rust-lang/rust/issues/46571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_size_of_val_raw",
+ description: r##"# `const_size_of_val_raw`
+
+The tracking issue for this feature is: [#46571]
+
+[#46571]: https://github.com/rust-lang/rust/issues/46571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_first_last",
+ description: r##"# `const_slice_first_last`
+
+The tracking issue for this feature is: [#83570]
+
+[#83570]: https://github.com/rust-lang/rust/issues/83570
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_from_mut_ptr_range",
+ description: r##"# `const_slice_from_mut_ptr_range`
+
+The tracking issue for this feature is: [#89792]
+
+[#89792]: https://github.com/rust-lang/rust/issues/89792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_from_ptr_range",
+ description: r##"# `const_slice_from_ptr_range`
+
+The tracking issue for this feature is: [#89792]
+
+[#89792]: https://github.com/rust-lang/rust/issues/89792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_from_raw_parts_mut",
+ description: r##"# `const_slice_from_raw_parts_mut`
+
+The tracking issue for this feature is: [#67456]
+
+[#67456]: https://github.com/rust-lang/rust/issues/67456
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_from_ref",
+ description: r##"# `const_slice_from_ref`
+
+The tracking issue for this feature is: [#90206]
+
+[#90206]: https://github.com/rust-lang/rust/issues/90206
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_index",
+ description: r##"# `const_slice_index`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_ptr_len",
+ description: r##"# `const_slice_ptr_len`
+
+The tracking issue for this feature is: [#71146]
+
+[#71146]: https://github.com/rust-lang/rust/issues/71146
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_slice_split_at_mut",
+ description: r##"# `const_slice_split_at_mut`
+
+The tracking issue for this feature is: [#101804]
+
+[#101804]: https://github.com/rust-lang/rust/issues/101804
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_str_from_utf8",
+ description: r##"# `const_str_from_utf8`
+
+The tracking issue for this feature is: [#91006]
+
+[#91006]: https://github.com/rust-lang/rust/issues/91006
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_str_from_utf8_unchecked_mut",
+ description: r##"# `const_str_from_utf8_unchecked_mut`
+
+The tracking issue for this feature is: [#91005]
+
+[#91005]: https://github.com/rust-lang/rust/issues/91005
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_swap",
+ description: r##"# `const_swap`
+
+The tracking issue for this feature is: [#83163]
+
+[#83163]: https://github.com/rust-lang/rust/issues/83163
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_trait_impl",
+ description: r##"# `const_trait_impl`
+
+The tracking issue for this feature is: [#67792]
+
+[#67792]: https://github.com/rust-lang/rust/issues/67792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_try",
+ description: r##"# `const_try`
+
+The tracking issue for this feature is: [#74935]
+
+[#74935]: https://github.com/rust-lang/rust/issues/74935
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_type_id",
+ description: r##"# `const_type_id`
+
+The tracking issue for this feature is: [#77125]
+
+[#77125]: https://github.com/rust-lang/rust/issues/77125
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_type_name",
+ description: r##"# `const_type_name`
+
+The tracking issue for this feature is: [#63084]
+
+[#63084]: https://github.com/rust-lang/rust/issues/63084
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_unicode_case_lookup",
+ description: r##"# `const_unicode_case_lookup`
+
+The tracking issue for this feature is: [#101400]
+
+[#101400]: https://github.com/rust-lang/rust/issues/101400
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_unsafecell_get_mut",
+ description: r##"# `const_unsafecell_get_mut`
+
+The tracking issue for this feature is: [#88836]
+
+[#88836]: https://github.com/rust-lang/rust/issues/88836
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "const_waker",
+ description: r##"# `const_waker`
+
+The tracking issue for this feature is: [#102012]
+
+[#102012]: https://github.com/rust-lang/rust/issues/102012
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "container_error_extra",
+ description: r##"# `container_error_extra`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "control_flow_enum",
+ description: r##"# `control_flow_enum`
+
+The tracking issue for this feature is: [#75744]
+
+[#75744]: https://github.com/rust-lang/rust/issues/75744
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "convert_float_to_int",
+ description: r##"# `convert_float_to_int`
+
+The tracking issue for this feature is: [#67057]
+
+[#67057]: https://github.com/rust-lang/rust/issues/67057
+
+------------------------
"##,
},
Lint {
@@ -1737,30 +3815,115 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "crate_visibility_modifier",
- description: r##"# `crate_visibility_modifier`
+ label: "coverage_attribute",
+ description: r##"# `coverage_attribute`
-The tracking issue for this feature is: [#53120]
+The tracking issue for this feature is: [#84605]
-[#53120]: https://github.com/rust-lang/rust/issues/53120
+[#84605]: https://github.com/rust-lang/rust/issues/84605
------
+---
-The `crate_visibility_modifier` feature allows the `crate` keyword to be used
-as a visibility modifier synonymous to `pub(crate)`, indicating that a type
-(function, _&c._) is to be visible to the entire enclosing crate, but not to
-other crates.
+The `coverage` attribute can be used to selectively disable coverage
+instrumentation in an annotated function. This might be useful to:
+
+- Avoid instrumentation overhead in a performance critical function
+- Avoid generating coverage for a function that is not meant to be executed,
+ but still target 100% coverage for the rest of the program.
+
+## Example
```rust
-#![feature(crate_visibility_modifier)]
+#![feature(coverage_attribute)]
-crate struct Foo {
- bar: usize,
+// `foo()` will get coverage instrumentation (by default)
+fn foo() {
+ // ...
+}
+
+#[coverage(off)]
+fn bar() {
+ // ...
}
```
"##,
},
Lint {
+ label: "cow_is_borrowed",
+ description: r##"# `cow_is_borrowed`
+
+The tracking issue for this feature is: [#65143]
+
+[#65143]: https://github.com/rust-lang/rust/issues/65143
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "csky_target_feature",
+ description: r##"# `csky_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cstr_count_bytes",
+ description: r##"# `cstr_count_bytes`
+
+The tracking issue for this feature is: [#114441]
+
+[#114441]: https://github.com/rust-lang/rust/issues/114441
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "cursor_remaining",
+ description: r##"# `cursor_remaining`
+
+The tracking issue for this feature is: [#86369]
+
+[#86369]: https://github.com/rust-lang/rust/issues/86369
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "custom_code_classes_in_docs",
+ description: r##"# `custom_code_classes_in_docs`
+
+The tracking issue for this feature is: [#79483]
+
+[#79483]: https://github.com/rust-lang/rust/issues/79483
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "custom_inner_attributes",
+ description: r##"# `custom_inner_attributes`
+
+The tracking issue for this feature is: [#54726]
+
+[#54726]: https://github.com/rust-lang/rust/issues/54726
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "custom_mir",
+ description: r##"# `custom_mir`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "custom_test_frameworks",
description: r##"# `custom_test_frameworks`
@@ -1797,6 +3960,17 @@ const WILL_FAIL: i32 = 4;
"##,
},
Lint {
+ label: "deadline_api",
+ description: r##"# `deadline_api`
+
+The tracking issue for this feature is: [#46316]
+
+[#46316]: https://github.com/rust-lang/rust/issues/46316
+
+------------------------
+"##,
+ },
+ Lint {
label: "dec2flt",
description: r##"# `dec2flt`
@@ -1806,54 +3980,47 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "default_free_fn",
- description: r##"# `default_free_fn`
+ label: "decl_macro",
+ description: r##"# `decl_macro`
-The tracking issue for this feature is: [#73014]
+The tracking issue for this feature is: [#39412]
-[#73014]: https://github.com/rust-lang/rust/issues/73014
+[#39412]: https://github.com/rust-lang/rust/issues/39412
------------------------
+"##,
+ },
+ Lint {
+ label: "default_type_parameter_fallback",
+ description: r##"# `default_type_parameter_fallback`
-Adds a free `default()` function to the `std::default` module. This function
-just forwards to [`Default::default()`], but may remove repetition of the word
-"default" from the call site.
+The tracking issue for this feature is: [#27336]
-[`Default::default()`]: https://doc.rust-lang.org/nightly/std/default/trait.Default.html#tymethod.default
+[#27336]: https://github.com/rust-lang/rust/issues/27336
-Here is an example:
+------------------------
+"##,
+ },
+ Lint {
+ label: "deprecated_safe",
+ description: r##"# `deprecated_safe`
-```rust
-#![feature(default_free_fn)]
-use std::default::default;
+The tracking issue for this feature is: [#94978]
-#[derive(Default)]
-struct AppConfig {
- foo: FooConfig,
- bar: BarConfig,
-}
+[#94978]: https://github.com/rust-lang/rust/issues/94978
-#[derive(Default)]
-struct FooConfig {
- foo: i32,
-}
+------------------------
+"##,
+ },
+ Lint {
+ label: "deprecated_suggestion",
+ description: r##"# `deprecated_suggestion`
-#[derive(Default)]
-struct BarConfig {
- bar: f32,
- baz: u8,
-}
+The tracking issue for this feature is: [#94785]
-fn main() {
- let options = AppConfig {
- foo: default(),
- bar: BarConfig {
- bar: 10.1,
- ..default()
- },
- };
-}
-```
+[#94785]: https://github.com/rust-lang/rust/issues/94785
+
+------------------------
"##,
},
Lint {
@@ -1866,6 +4033,15 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "derive_const",
+ description: r##"# `derive_const`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "derive_eq",
description: r##"# `derive_eq`
@@ -1875,6 +4051,79 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "diagnostic_namespace",
+ description: r##"# `diagnostic_namespace`
+
+The tracking issue for this feature is: [#111996]
+
+[#111996]: https://github.com/rust-lang/rust/issues/111996
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "dir_entry_ext2",
+ description: r##"# `dir_entry_ext2`
+
+The tracking issue for this feature is: [#85573]
+
+[#85573]: https://github.com/rust-lang/rust/issues/85573
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "discriminant_kind",
+ description: r##"# `discriminant_kind`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "dispatch_from_dyn",
+ description: r##"# `dispatch_from_dyn`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "div_duration",
+ description: r##"# `div_duration`
+
+The tracking issue for this feature is: [#63139]
+
+[#63139]: https://github.com/rust-lang/rust/issues/63139
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "do_not_recommend",
+ description: r##"# `do_not_recommend`
+
+The tracking issue for this feature is: [#51992]
+
+[#51992]: https://github.com/rust-lang/rust/issues/51992
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "doc_auto_cfg",
+ description: r##"# `doc_auto_cfg`
+
+The tracking issue for this feature is: [#43781]
+
+[#43781]: https://github.com/rust-lang/rust/issues/43781
+
+------------------------
+"##,
+ },
+ Lint {
label: "doc_cfg",
description: r##"# `doc_cfg`
@@ -1885,7 +4134,7 @@ The tracking issue for this feature is: [#43781]
The `doc_cfg` feature allows an API be documented as only available in some specific platforms.
This attribute has two effects:
-1. In the annotated item's documentation, there will be a message saying "This is supported on
+1. In the annotated item's documentation, there will be a message saying "Available on
(platform) only".
2. The item's doc-tests will only run on the specific platform.
@@ -1925,6 +4174,17 @@ pub struct Icon {
"##,
},
Lint {
+ label: "doc_cfg_hide",
+ description: r##"# `doc_cfg_hide`
+
+The tracking issue for this feature is: [#43781]
+
+[#43781]: https://github.com/rust-lang/rust/issues/43781
+
+------------------------
+"##,
+ },
+ Lint {
label: "doc_masked",
description: r##"# `doc_masked`
@@ -1990,6 +4250,180 @@ See also its documentation in [the rustdoc book][rustdoc-book-notable_trait].
"##,
},
Lint {
+ label: "downcast_unchecked",
+ description: r##"# `downcast_unchecked`
+
+The tracking issue for this feature is: [#90850]
+
+[#90850]: https://github.com/rust-lang/rust/issues/90850
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "drain_keep_rest",
+ description: r##"# `drain_keep_rest`
+
+The tracking issue for this feature is: [#101122]
+
+[#101122]: https://github.com/rust-lang/rust/issues/101122
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "dropck_eyepatch",
+ description: r##"# `dropck_eyepatch`
+
+The tracking issue for this feature is: [#34761]
+
+[#34761]: https://github.com/rust-lang/rust/issues/34761
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "duration_constants",
+ description: r##"# `duration_constants`
+
+The tracking issue for this feature is: [#57391]
+
+[#57391]: https://github.com/rust-lang/rust/issues/57391
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "duration_consts_float",
+ description: r##"# `duration_consts_float`
+
+The tracking issue for this feature is: [#72440]
+
+[#72440]: https://github.com/rust-lang/rust/issues/72440
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "dyn_star",
+ description: r##"# `dyn_star`
+
+The tracking issue for this feature is: [#102425]
+
+[#102425]: https://github.com/rust-lang/rust/issues/102425
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "edition_panic",
+ description: r##"# `edition_panic`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "effects",
+ description: r##"# `effects`
+
+The tracking issue for this feature is: [#102090]
+
+[#102090]: https://github.com/rust-lang/rust/issues/102090
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "entry_insert",
+ description: r##"# `entry_insert`
+
+The tracking issue for this feature is: [#65225]
+
+[#65225]: https://github.com/rust-lang/rust/issues/65225
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ermsb_target_feature",
+ description: r##"# `ermsb_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_generic_member_access",
+ description: r##"# `error_generic_member_access`
+
+The tracking issue for this feature is: [#99301]
+
+[#99301]: https://github.com/rust-lang/rust/issues/99301
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_in_core",
+ description: r##"# `error_in_core`
+
+The tracking issue for this feature is: [#103765]
+
+[#103765]: https://github.com/rust-lang/rust/issues/103765
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_iter",
+ description: r##"# `error_iter`
+
+The tracking issue for this feature is: [#58520]
+
+[#58520]: https://github.com/rust-lang/rust/issues/58520
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_reporter",
+ description: r##"# `error_reporter`
+
+The tracking issue for this feature is: [#90172]
+
+[#90172]: https://github.com/rust-lang/rust/issues/90172
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "error_type_id",
+ description: r##"# `error_type_id`
+
+The tracking issue for this feature is: [#60784]
+
+[#60784]: https://github.com/rust-lang/rust/issues/60784
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "exact_size_is_empty",
+ description: r##"# `exact_size_is_empty`
+
+The tracking issue for this feature is: [#35428]
+
+[#35428]: https://github.com/rust-lang/rust/issues/35428
+
+------------------------
+"##,
+ },
+ Lint {
label: "exclusive_range_pattern",
description: r##"# `exclusive_range_pattern`
@@ -2020,60 +4454,105 @@ stabilized.
"##,
},
Lint {
- label: "explicit_generic_args_with_impl_trait",
- description: r##"# `explicit_generic_args_with_impl_trait`
+ label: "exclusive_wrapper",
+ description: r##"# `exclusive_wrapper`
-The tracking issue for this feature is: [#83701]
+The tracking issue for this feature is: [#98407]
-[#83701]: https://github.com/rust-lang/rust/issues/83701
+[#98407]: https://github.com/rust-lang/rust/issues/98407
------------------------
+"##,
+ },
+ Lint {
+ label: "exhaustive_patterns",
+ description: r##"# `exhaustive_patterns`
-The `explicit_generic_args_with_impl_trait` feature gate lets you specify generic arguments even
-when `impl Trait` is used in argument position.
+The tracking issue for this feature is: [#51085]
-A simple example is:
+[#51085]: https://github.com/rust-lang/rust/issues/51085
-```rust
-#![feature(explicit_generic_args_with_impl_trait)]
+------------------------
+"##,
+ },
+ Lint {
+ label: "exit_status_error",
+ description: r##"# `exit_status_error`
-fn foo<T: ?Sized>(_f: impl AsRef<T>) {}
+The tracking issue for this feature is: [#84908]
-fn main() {
- foo::<str>("".to_string());
-}
-```
+[#84908]: https://github.com/rust-lang/rust/issues/84908
-This is currently rejected:
+------------------------
+"##,
+ },
+ Lint {
+ label: "exitcode_exit_method",
+ description: r##"# `exitcode_exit_method`
-```text
-error[E0632]: cannot provide explicit generic arguments when `impl Trait` is used in argument position
- --> src/main.rs:6:11
- |
-6 | foo::<str>("".to_string());
- | ^^^ explicit generic argument not allowed
+The tracking issue for this feature is: [#97100]
-```
+[#97100]: https://github.com/rust-lang/rust/issues/97100
-However it would compile if `explicit_generic_args_with_impl_trait` is enabled.
+------------------------
+"##,
+ },
+ Lint {
+ label: "explicit_tail_calls",
+ description: r##"# `explicit_tail_calls`
-Note that the synthetic type parameters from `impl Trait` are still implicit and you
-cannot explicitly specify these:
+The tracking issue for this feature is: [#112788]
-```rust,compile_fail
-#![feature(explicit_generic_args_with_impl_trait)]
+[#112788]: https://github.com/rust-lang/rust/issues/112788
-fn foo<T: ?Sized>(_f: impl AsRef<T>) {}
-fn bar<T: ?Sized, F: AsRef<T>>(_f: F) {}
+------------------------
+"##,
+ },
+ Lint {
+ label: "extend_one",
+ description: r##"# `extend_one`
-fn main() {
- bar::<str, _>("".to_string()); // Okay
- bar::<str, String>("".to_string()); // Okay
+The tracking issue for this feature is: [#72631]
- foo::<str>("".to_string()); // Okay
- foo::<str, String>("".to_string()); // Error, you cannot specify `impl Trait` explicitly
-}
-```
+[#72631]: https://github.com/rust-lang/rust/issues/72631
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "extended_varargs_abi_support",
+ description: r##"# `extended_varargs_abi_support`
+
+The tracking issue for this feature is: [#100189]
+
+[#100189]: https://github.com/rust-lang/rust/issues/100189
+
+------------------------
+
+This feature adds the possibility of using `sysv64`, `win64` or `efiapi` calling
+conventions on functions with varargs.
+"##,
+ },
+ Lint {
+ label: "extern_types",
+ description: r##"# `extern_types`
+
+The tracking issue for this feature is: [#43467]
+
+[#43467]: https://github.com/rust-lang/rust/issues/43467
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "extract_if",
+ description: r##"# `extract_if`
+
+The tracking issue for this feature is: [#43244]
+
+[#43244]: https://github.com/rust-lang/rust/issues/43244
+
+------------------------
"##,
},
Lint {
@@ -2211,6 +4690,72 @@ against are compatible with those of the `#[ffi_pure]`.
"##,
},
Lint {
+ label: "ffi_returns_twice",
+ description: r##"# `ffi_returns_twice`
+
+The tracking issue for this feature is: [#58314]
+
+[#58314]: https://github.com/rust-lang/rust/issues/58314
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "file_create_new",
+ description: r##"# `file_create_new`
+
+The tracking issue for this feature is: [#105135]
+
+[#105135]: https://github.com/rust-lang/rust/issues/105135
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "file_set_times",
+ description: r##"# `file_set_times`
+
+The tracking issue for this feature is: [#98245]
+
+[#98245]: https://github.com/rust-lang/rust/issues/98245
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "float_gamma",
+ description: r##"# `float_gamma`
+
+The tracking issue for this feature is: [#99842]
+
+[#99842]: https://github.com/rust-lang/rust/issues/99842
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "float_minimum_maximum",
+ description: r##"# `float_minimum_maximum`
+
+The tracking issue for this feature is: [#91079]
+
+[#91079]: https://github.com/rust-lang/rust/issues/91079
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "float_next_up_down",
+ description: r##"# `float_next_up_down`
+
+The tracking issue for this feature is: [#91399]
+
+[#91399]: https://github.com/rust-lang/rust/issues/91399
+
+------------------------
+"##,
+ },
+ Lint {
label: "flt2dec",
description: r##"# `flt2dec`
@@ -2220,6 +4765,15 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "fmt_helpers_for_derive",
+ description: r##"# `fmt_helpers_for_derive`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "fmt_internals",
description: r##"# `fmt_internals`
@@ -2229,6 +4783,26 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "fn_align",
+ description: r##"# `fn_align`
+
+The tracking issue for this feature is: [#82232]
+
+[#82232]: https://github.com/rust-lang/rust/issues/82232
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fn_ptr_trait",
+ description: r##"# `fn_ptr_trait`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "fn_traits",
description: r##"# `fn_traits`
@@ -2243,7 +4817,7 @@ See Also: [`unboxed_closures`](../language-features/unboxed-closures.md)
The `fn_traits` feature allows for implementation of the [`Fn*`] traits
for creating custom closure-like types.
-[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+[`Fn*`]: ../../std/ops/trait.Fn.html
```rust
#![feature(unboxed_closures)]
@@ -2268,6 +4842,90 @@ fn main() {
"##,
},
Lint {
+ label: "forget_unsized",
+ description: r##"# `forget_unsized`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "format_args_nl",
+ description: r##"# `format_args_nl`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fs_try_exists",
+ description: r##"# `fs_try_exists`
+
+The tracking issue for this feature is: [#83186]
+
+[#83186]: https://github.com/rust-lang/rust/issues/83186
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "fundamental",
+ description: r##"# `fundamental`
+
+The tracking issue for this feature is: [#29635]
+
+[#29635]: https://github.com/rust-lang/rust/issues/29635
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "future_join",
+ description: r##"# `future_join`
+
+The tracking issue for this feature is: [#91642]
+
+[#91642]: https://github.com/rust-lang/rust/issues/91642
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "gen_future",
+ description: r##"# `gen_future`
+
+The tracking issue for this feature is: [#50547]
+
+[#50547]: https://github.com/rust-lang/rust/issues/50547
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generator_clone",
+ description: r##"# `generator_clone`
+
+The tracking issue for this feature is: [#95360]
+
+[#95360]: https://github.com/rust-lang/rust/issues/95360
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generator_trait",
+ description: r##"# `generator_trait`
+
+The tracking issue for this feature is: [#43122]
+
+[#43122]: https://github.com/rust-lang/rust/issues/43122
+
+------------------------
+"##,
+ },
+ Lint {
label: "generators",
description: r##"# `generators`
@@ -2518,82 +5176,253 @@ does.
"##,
},
Lint {
- label: "half_open_range_patterns",
- description: r##"# `half_open_range_patterns`
+ label: "generic_arg_infer",
+ description: r##"# `generic_arg_infer`
+
+The tracking issue for this feature is: [#85077]
+
+[#85077]: https://github.com/rust-lang/rust/issues/85077
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_assert",
+ description: r##"# `generic_assert`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_assert_internals",
+ description: r##"# `generic_assert_internals`
+
+The tracking issue for this feature is: [#44838]
+
+[#44838]: https://github.com/rust-lang/rust/issues/44838
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_associated_types_extended",
+ description: r##"# `generic_associated_types_extended`
+
+The tracking issue for this feature is: [#95451]
+
+[#95451]: https://github.com/rust-lang/rust/issues/95451
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_const_exprs",
+ description: r##"# `generic_const_exprs`
+
+The tracking issue for this feature is: [#76560]
+
+[#76560]: https://github.com/rust-lang/rust/issues/76560
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "generic_const_items",
+ description: r##"# `generic_const_items`
+
+The tracking issue for this feature is: [#113521]
+
+[#113521]: https://github.com/rust-lang/rust/issues/113521
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "get_many_mut",
+ description: r##"# `get_many_mut`
+
+The tracking issue for this feature is: [#104642]
+
+[#104642]: https://github.com/rust-lang/rust/issues/104642
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "get_mut_unchecked",
+ description: r##"# `get_mut_unchecked`
+
+The tracking issue for this feature is: [#63292]
+
+[#63292]: https://github.com/rust-lang/rust/issues/63292
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "half_open_range_patterns_in_slices",
+ description: r##"# `half_open_range_patterns_in_slices`
The tracking issue for this feature is: [#67264]
-It is part of the `#![exclusive_range_pattern]` feature,
+It is part of the `exclusive_range_pattern` feature,
tracked at [#37854].
[#67264]: https://github.com/rust-lang/rust/issues/67264
[#37854]: https://github.com/rust-lang/rust/issues/37854
-----
-The `half_open_range_patterns` feature allows RangeTo patterns
-(`..10`) to be used in appropriate pattern matching contexts.
-This requires also enabling the `exclusive_range_pattern` feature.
-
-It also enabled RangeFrom patterns but that has since been
-stabilized.
+This feature allow using top-level half-open range patterns in slices.
```rust
-#![feature(half_open_range_patterns)]
+#![feature(half_open_range_patterns_in_slices)]
#![feature(exclusive_range_pattern)]
- let x = 5;
- match x {
- ..0 => println!("negative!"), // "RangeTo" pattern. Unstable.
- 0 => println!("zero!"),
- 1.. => println!("positive!"), // "RangeFrom" pattern. Stable.
- }
+
+fn main() {
+ let xs = [13, 1, 5, 2, 3, 1, 21, 8];
+ let [a @ 3.., b @ ..3, c @ 4..6, ..] = xs else { return; };
+}
+```
+
+Note that this feature is not required if the patterns are wrapped between parenthesis.
+
+```rust
+fn main() {
+ let xs = [13, 1];
+ let [(a @ 3..), c] = xs else { return; };
+}
```
"##,
},
Lint {
- label: "infer_static_outlives_requirements",
- description: r##"# `infer_static_outlives_requirements`
+ label: "hash_extract_if",
+ description: r##"# `hash_extract_if`
-The tracking issue for this feature is: [#54185]
+The tracking issue for this feature is: [#59618]
-[#54185]: https://github.com/rust-lang/rust/issues/54185
+[#59618]: https://github.com/rust-lang/rust/issues/59618
------------------------
-The `infer_static_outlives_requirements` feature indicates that certain
-`'static` outlives requirements can be inferred by the compiler rather than
-stating them explicitly.
+"##,
+ },
+ Lint {
+ label: "hash_raw_entry",
+ description: r##"# `hash_raw_entry`
-Note: It is an accompanying feature to `infer_outlives_requirements`,
-which must be enabled to infer outlives requirements.
+The tracking issue for this feature is: [#56167]
-For example, currently generic struct definitions that contain
-references, require where-clauses of the form T: 'static. By using
-this feature the outlives predicates will be inferred, although
-they may still be written explicitly.
+[#56167]: https://github.com/rust-lang/rust/issues/56167
-```rust,ignore (pseudo-Rust)
-struct Foo<U> where U: 'static { // <-- currently required
- bar: Bar<U>
-}
-struct Bar<T: 'static> {
- x: T,
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "hash_set_entry",
+ description: r##"# `hash_set_entry`
+The tracking issue for this feature is: [#60896]
-## Examples:
+[#60896]: https://github.com/rust-lang/rust/issues/60896
-```rust,ignore (pseudo-Rust)
-#![feature(infer_outlives_requirements)]
-#![feature(infer_static_outlives_requirements)]
+------------------------
+"##,
+ },
+ Lint {
+ label: "hasher_prefixfree_extras",
+ description: r##"# `hasher_prefixfree_extras`
-#[rustc_outlives]
-// Implicitly infer U: 'static
-struct Foo<U> {
- bar: Bar<U>
-}
-struct Bar<T: 'static> {
- x: T,
-}
-```
+The tracking issue for this feature is: [#96762]
+
+[#96762]: https://github.com/rust-lang/rust/issues/96762
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "hashmap_internals",
+ description: r##"# `hashmap_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "hexagon_target_feature",
+ description: r##"# `hexagon_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "hint_must_use",
+ description: r##"# `hint_must_use`
+
+The tracking issue for this feature is: [#94745]
+
+[#94745]: https://github.com/rust-lang/rust/issues/94745
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "if_let_guard",
+ description: r##"# `if_let_guard`
+
+The tracking issue for this feature is: [#51114]
+
+[#51114]: https://github.com/rust-lang/rust/issues/51114
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "impl_trait_in_assoc_type",
+ description: r##"# `impl_trait_in_assoc_type`
+
+The tracking issue for this feature is: [#63063]
+
+[#63063]: https://github.com/rust-lang/rust/issues/63063
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "impl_trait_in_fn_trait_return",
+ description: r##"# `impl_trait_in_fn_trait_return`
+
+The tracking issue for this feature is: [#99697]
+
+[#99697]: https://github.com/rust-lang/rust/issues/99697
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "imported_main",
+ description: r##"# `imported_main`
+
+The tracking issue for this feature is: [#28937]
+
+[#28937]: https://github.com/rust-lang/rust/issues/28937
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "inherent_associated_types",
+ description: r##"# `inherent_associated_types`
+
+The tracking issue for this feature is: [#8995]
+
+[#8995]: https://github.com/rust-lang/rust/issues/8995
+
+------------------------
"##,
},
Lint {
@@ -2661,10 +5490,41 @@ match some_int {
"##,
},
Lint {
- label: "int_error_internals",
- description: r##"# `int_error_internals`
+ label: "inplace_iteration",
+ description: r##"# `inplace_iteration`
-This feature is internal to the Rust compiler and is not intended for general use.
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "int_roundings",
+ description: r##"# `int_roundings`
+
+The tracking issue for this feature is: [#88581]
+
+[#88581]: https://github.com/rust-lang/rust/issues/88581
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "integer_atomics",
+ description: r##"# `integer_atomics`
+
+The tracking issue for this feature is: [#99069]
+
+[#99069]: https://github.com/rust-lang/rust/issues/99069
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "internal_impls_macro",
+ description: r##"# `internal_impls_macro`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
------------------------
"##,
@@ -2718,12 +5578,13 @@ via a declaration like
```rust
#![feature(intrinsics)]
+#![allow(internal_features)]
# fn main() {}
extern "rust-intrinsic" {
fn transmute<T, U>(x: T) -> U;
- fn offset<T>(dst: *const T, offset: isize) -> *const T;
+ fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
}
```
@@ -2731,6 +5592,92 @@ As with any other FFI functions, these are always `unsafe` to call.
"##,
},
Lint {
+ label: "io_error_downcast",
+ description: r##"# `io_error_downcast`
+
+The tracking issue for this feature is: [#99262]
+
+[#99262]: https://github.com/rust-lang/rust/issues/99262
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "io_error_more",
+ description: r##"# `io_error_more`
+
+The tracking issue for this feature is: [#86442]
+
+[#86442]: https://github.com/rust-lang/rust/issues/86442
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "io_error_uncategorized",
+ description: r##"# `io_error_uncategorized`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "io_slice_advance",
+ description: r##"# `io_slice_advance`
+
+The tracking issue for this feature is: [#62726]
+
+[#62726]: https://github.com/rust-lang/rust/issues/62726
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ip",
+ description: r##"# `ip`
+
+The tracking issue for this feature is: [#27709]
+
+[#27709]: https://github.com/rust-lang/rust/issues/27709
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ip_bits",
+ description: r##"# `ip_bits`
+
+The tracking issue for this feature is: [#113744]
+
+[#113744]: https://github.com/rust-lang/rust/issues/113744
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ip_in_core",
+ description: r##"# `ip_in_core`
+
+The tracking issue for this feature is: [#108443]
+
+[#108443]: https://github.com/rust-lang/rust/issues/108443
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "is_ascii_octdigit",
+ description: r##"# `is_ascii_octdigit`
+
+The tracking issue for this feature is: [#101288]
+
+[#101288]: https://github.com/rust-lang/rust/issues/101288
+
+------------------------
+"##,
+ },
+ Lint {
label: "is_sorted",
description: r##"# `is_sorted`
@@ -2746,6 +5693,160 @@ add the methods `is_sorted`, `is_sorted_by` and `is_sorted_by_key` to
"##,
},
Lint {
+ label: "isqrt",
+ description: r##"# `isqrt`
+
+The tracking issue for this feature is: [#116226]
+
+[#116226]: https://github.com/rust-lang/rust/issues/116226
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_advance_by",
+ description: r##"# `iter_advance_by`
+
+The tracking issue for this feature is: [#77404]
+
+[#77404]: https://github.com/rust-lang/rust/issues/77404
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_array_chunks",
+ description: r##"# `iter_array_chunks`
+
+The tracking issue for this feature is: [#100450]
+
+[#100450]: https://github.com/rust-lang/rust/issues/100450
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_collect_into",
+ description: r##"# `iter_collect_into`
+
+The tracking issue for this feature is: [#94780]
+
+[#94780]: https://github.com/rust-lang/rust/issues/94780
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_from_generator",
+ description: r##"# `iter_from_generator`
+
+The tracking issue for this feature is: [#43122]
+
+[#43122]: https://github.com/rust-lang/rust/issues/43122
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_intersperse",
+ description: r##"# `iter_intersperse`
+
+The tracking issue for this feature is: [#79524]
+
+[#79524]: https://github.com/rust-lang/rust/issues/79524
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_is_partitioned",
+ description: r##"# `iter_is_partitioned`
+
+The tracking issue for this feature is: [#62544]
+
+[#62544]: https://github.com/rust-lang/rust/issues/62544
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_map_windows",
+ description: r##"# `iter_map_windows`
+
+The tracking issue for this feature is: [#87155]
+
+[#87155]: https://github.com/rust-lang/rust/issues/87155
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_next_chunk",
+ description: r##"# `iter_next_chunk`
+
+The tracking issue for this feature is: [#98326]
+
+[#98326]: https://github.com/rust-lang/rust/issues/98326
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_order_by",
+ description: r##"# `iter_order_by`
+
+The tracking issue for this feature is: [#64295]
+
+[#64295]: https://github.com/rust-lang/rust/issues/64295
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_partition_in_place",
+ description: r##"# `iter_partition_in_place`
+
+The tracking issue for this feature is: [#62543]
+
+[#62543]: https://github.com/rust-lang/rust/issues/62543
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iter_repeat_n",
+ description: r##"# `iter_repeat_n`
+
+The tracking issue for this feature is: [#104434]
+
+[#104434]: https://github.com/rust-lang/rust/issues/104434
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iterator_try_collect",
+ description: r##"# `iterator_try_collect`
+
+The tracking issue for this feature is: [#94047]
+
+[#94047]: https://github.com/rust-lang/rust/issues/94047
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "iterator_try_reduce",
+ description: r##"# `iterator_try_reduce`
+
+The tracking issue for this feature is: [#87053]
+
+[#87053]: https://github.com/rust-lang/rust/issues/87053
+
+------------------------
+"##,
+ },
+ Lint {
label: "lang_items",
description: r##"# `lang_items`
@@ -2758,304 +5859,190 @@ functionality that isn't hard-coded into the language, but is
implemented in libraries, with a special marker to tell the compiler
it exists. The marker is the attribute `#[lang = "..."]` and there are
various different values of `...`, i.e. various different 'lang
-items'.
+items'. Most of them can only be defined once.
-For example, `Box` pointers require two lang items, one for allocation
-and one for deallocation. A freestanding program that uses the `Box`
-sugar for dynamic allocations via `malloc` and `free`:
+Lang items are loaded lazily by the compiler; e.g. if one never uses `Box`
+then there is no need to define a function for `exchange_malloc`.
+`rustc` will emit an error when an item is needed but not found in the current
+crate or any that it depends on.
+
+Some features provided by lang items:
+
+- overloadable operators via traits: the traits corresponding to the
+ `==`, `<`, dereferencing (`*`) and `+` (etc.) operators are all
+ marked with lang items; those specific four are `eq`, `partial_ord`,
+ `deref`/`deref_mut`, and `add` respectively.
+- panicking: the `panic` and `panic_impl` lang items, among others.
+- stack unwinding: the lang item `eh_personality` is a function used by the
+ failure mechanisms of the compiler. This is often mapped to GCC's personality
+ function (see the [`std` implementation][personality] for more information),
+ but programs which don't trigger a panic can be assured that this function is
+ never called. Additionally, a `eh_catch_typeinfo` static is needed for certain
+ targets which implement Rust panics on top of C++ exceptions.
+- the traits in `core::marker` used to indicate types of
+ various kinds; e.g. lang items `sized`, `sync` and `copy`.
+- memory allocation, see below.
+
+Most lang items are defined by `core`, but if you're trying to build
+an executable without the `std` crate, you might run into the need
+for lang item definitions.
+
+[personality]: https://github.com/rust-lang/rust/blob/master/library/std/src/sys/personality/gcc.rs
+
+## Example: Implementing a `Box`
+
+`Box` pointers require two lang items: one for the type itself and one for
+allocation. A freestanding program that uses the `Box` sugar for dynamic
+allocations via `malloc` and `free`:
```rust,ignore (libc-is-finicky)
-#![feature(lang_items, box_syntax, start, libc, core_intrinsics, rustc_private)]
+#![feature(lang_items, start, core_intrinsics, rustc_private, panic_unwind, rustc_attrs)]
+#![allow(internal_features)]
#![no_std]
+
+extern crate libc;
+extern crate unwind;
+
+use core::ffi::c_void;
use core::intrinsics;
use core::panic::PanicInfo;
+use core::ptr::NonNull;
-extern crate libc;
+pub struct Global; // the global allocator
+struct Unique<T>(NonNull<T>);
#[lang = "owned_box"]
-pub struct Box<T>(*mut T);
+pub struct Box<T, A = Global>(Unique<T>, A);
+
+impl<T> Box<T> {
+ pub fn new(x: T) -> Self {
+ #[rustc_box]
+ Box::new(x)
+ }
+}
+
+impl<T, A> Drop for Box<T, A> {
+ fn drop(&mut self) {
+ unsafe {
+ libc::free(self.0.0.as_ptr() as *mut c_void);
+ }
+ }
+}
#[lang = "exchange_malloc"]
unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {
- let p = libc::malloc(size as libc::size_t) as *mut u8;
+ let p = libc::malloc(size) as *mut u8;
// Check if `malloc` failed:
- if p as usize == 0 {
+ if p.is_null() {
intrinsics::abort();
}
p
}
-#[lang = "box_free"]
-unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
- libc::free(ptr as *mut libc::c_void)
-}
-
#[start]
fn main(_argc: isize, _argv: *const *const u8) -> isize {
- let _x = box 1;
+ let _x = Box::new(1);
0
}
-#[lang = "eh_personality"] extern fn rust_eh_personality() {}
-#[lang = "panic_impl"] extern fn rust_begin_panic(info: &PanicInfo) -> ! { unsafe { intrinsics::abort() } }
-#[no_mangle] pub extern fn rust_eh_register_frames () {}
-#[no_mangle] pub extern fn rust_eh_unregister_frames () {}
+#[lang = "eh_personality"]
+fn rust_eh_personality() {}
+
+#[panic_handler]
+fn panic_handler(_info: &PanicInfo) -> ! { intrinsics::abort() }
```
Note the use of `abort`: the `exchange_malloc` lang item is assumed to
return a valid pointer, and so needs to do the check internally.
-Other features provided by lang items include:
-
-- overloadable operators via traits: the traits corresponding to the
- `==`, `<`, dereferencing (`*`) and `+` (etc.) operators are all
- marked with lang items; those specific four are `eq`, `ord`,
- `deref`, and `add` respectively.
-- stack unwinding and general failure; the `eh_personality`,
- `panic` and `panic_bounds_check` lang items.
-- the traits in `std::marker` used to indicate types of
- various kinds; lang items `send`, `sync` and `copy`.
-- the marker types and variance indicators found in
- `std::marker`; lang items `covariant_type`,
- `contravariant_lifetime`, etc.
-
-Lang items are loaded lazily by the compiler; e.g. if one never uses
-`Box` then there is no need to define functions for `exchange_malloc`
-and `box_free`. `rustc` will emit an error when an item is needed
-but not found in the current crate or any that it depends on.
-
-Most lang items are defined by `libcore`, but if you're trying to build
-an executable without the standard library, you'll run into the need
-for lang items. The rest of this page focuses on this use-case, even though
-lang items are a bit broader than that.
-
-### Using libc
-
-In order to build a `#[no_std]` executable we will need libc as a dependency.
-We can specify this using our `Cargo.toml` file:
-
-```toml
-[dependencies]
-libc = { version = "0.2.14", default-features = false }
-```
-
-Note that the default features have been disabled. This is a critical step -
-**the default features of libc include the standard library and so must be
-disabled.**
-
-### Writing an executable without stdlib
-
-Controlling the entry point is possible in two ways: the `#[start]` attribute,
-or overriding the default shim for the C `main` function with your own.
-
-The function marked `#[start]` is passed the command line parameters
-in the same format as C:
+## List of all language items
-```rust,ignore (libc-is-finicky)
-#![feature(lang_items, core_intrinsics, rustc_private)]
-#![feature(start)]
-#![no_std]
-use core::intrinsics;
-use core::panic::PanicInfo;
+An up-to-date list of all language items can be found [here] in the compiler code.
-// Pull in the system libc library for what crt0.o likely requires.
-extern crate libc;
+[here]: https://github.com/rust-lang/rust/blob/master/compiler/rustc_hir/src/lang_items.rs
+"##,
+ },
+ Lint {
+ label: "large_assignments",
+ description: r##"# `large_assignments`
-// Entry point for this program.
-#[start]
-fn start(_argc: isize, _argv: *const *const u8) -> isize {
- 0
-}
+The tracking issue for this feature is: [#83518]
-// These functions are used by the compiler, but not
-// for a bare-bones hello world. These are normally
-// provided by libstd.
-#[lang = "eh_personality"]
-#[no_mangle]
-pub extern fn rust_eh_personality() {
-}
+[#83518]: https://github.com/rust-lang/rust/issues/83518
-#[lang = "panic_impl"]
-#[no_mangle]
-pub extern fn rust_begin_panic(info: &PanicInfo) -> ! {
- unsafe { intrinsics::abort() }
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "layout_for_ptr",
+ description: r##"# `layout_for_ptr`
-To override the compiler-inserted `main` shim, one has to disable it
-with `#![no_main]` and then create the appropriate symbol with the
-correct ABI and the correct name, which requires overriding the
-compiler's name mangling too:
+The tracking issue for this feature is: [#69835]
-```rust,ignore (libc-is-finicky)
-#![feature(lang_items, core_intrinsics, rustc_private)]
-#![feature(start)]
-#![no_std]
-#![no_main]
-use core::intrinsics;
-use core::panic::PanicInfo;
+[#69835]: https://github.com/rust-lang/rust/issues/69835
-// Pull in the system libc library for what crt0.o likely requires.
-extern crate libc;
+------------------------
+"##,
+ },
+ Lint {
+ label: "lazy_cell",
+ description: r##"# `lazy_cell`
-// Entry point for this program.
-#[no_mangle] // ensure that this symbol is called `main` in the output
-pub extern fn main(_argc: i32, _argv: *const *const u8) -> i32 {
- 0
-}
+The tracking issue for this feature is: [#109736]
-// These functions are used by the compiler, but not
-// for a bare-bones hello world. These are normally
-// provided by libstd.
-#[lang = "eh_personality"]
-#[no_mangle]
-pub extern fn rust_eh_personality() {
-}
+[#109736]: https://github.com/rust-lang/rust/issues/109736
-#[lang = "panic_impl"]
-#[no_mangle]
-pub extern fn rust_begin_panic(info: &PanicInfo) -> ! {
- unsafe { intrinsics::abort() }
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "lazy_cell_consume",
+ description: r##"# `lazy_cell_consume`
-In many cases, you may need to manually link to the `compiler_builtins` crate
-when building a `no_std` binary. You may observe this via linker error messages
-such as "```undefined reference to `__rust_probestack'```".
+The tracking issue for this feature is: [#109736]
-## More about the language items
+[#109736]: https://github.com/rust-lang/rust/issues/109736
-The compiler currently makes a few assumptions about symbols which are
-available in the executable to call. Normally these functions are provided by
-the standard library, but without it you must define your own. These symbols
-are called "language items", and they each have an internal name, and then a
-signature that an implementation must conform to.
+------------------------
+"##,
+ },
+ Lint {
+ label: "lazy_type_alias",
+ description: r##"# `lazy_type_alias`
-The first of these functions, `rust_eh_personality`, is used by the failure
-mechanisms of the compiler. This is often mapped to GCC's personality function
-(see the [libstd implementation][unwind] for more information), but crates
-which do not trigger a panic can be assured that this function is never
-called. The language item's name is `eh_personality`.
+The tracking issue for this feature is: [#112792]
-[unwind]: https://github.com/rust-lang/rust/blob/master/library/panic_unwind/src/gcc.rs
+[#112792]: https://github.com/rust-lang/rust/issues/112792
-The second function, `rust_begin_panic`, is also used by the failure mechanisms of the
-compiler. When a panic happens, this controls the message that's displayed on
-the screen. While the language item's name is `panic_impl`, the symbol name is
-`rust_begin_panic`.
+------------------------
+"##,
+ },
+ Lint {
+ label: "let_chains",
+ description: r##"# `let_chains`
-Finally, a `eh_catch_typeinfo` static is needed for certain targets which
-implement Rust panics on top of C++ exceptions.
+The tracking issue for this feature is: [#53667]
-## List of all language items
+[#53667]: https://github.com/rust-lang/rust/issues/53667
-This is a list of all language items in Rust along with where they are located in
-the source code.
-
-- Primitives
- - `i8`: `libcore/num/mod.rs`
- - `i16`: `libcore/num/mod.rs`
- - `i32`: `libcore/num/mod.rs`
- - `i64`: `libcore/num/mod.rs`
- - `i128`: `libcore/num/mod.rs`
- - `isize`: `libcore/num/mod.rs`
- - `u8`: `libcore/num/mod.rs`
- - `u16`: `libcore/num/mod.rs`
- - `u32`: `libcore/num/mod.rs`
- - `u64`: `libcore/num/mod.rs`
- - `u128`: `libcore/num/mod.rs`
- - `usize`: `libcore/num/mod.rs`
- - `f32`: `libstd/f32.rs`
- - `f64`: `libstd/f64.rs`
- - `char`: `libcore/char.rs`
- - `slice`: `liballoc/slice.rs`
- - `str`: `liballoc/str.rs`
- - `const_ptr`: `libcore/ptr.rs`
- - `mut_ptr`: `libcore/ptr.rs`
- - `unsafe_cell`: `libcore/cell.rs`
-- Runtime
- - `start`: `libstd/rt.rs`
- - `eh_personality`: `libpanic_unwind/emcc.rs` (EMCC)
- - `eh_personality`: `libpanic_unwind/gcc.rs` (GNU)
- - `eh_personality`: `libpanic_unwind/seh.rs` (SEH)
- - `eh_catch_typeinfo`: `libpanic_unwind/emcc.rs` (EMCC)
- - `panic`: `libcore/panicking.rs`
- - `panic_bounds_check`: `libcore/panicking.rs`
- - `panic_impl`: `libcore/panicking.rs`
- - `panic_impl`: `libstd/panicking.rs`
-- Allocations
- - `owned_box`: `liballoc/boxed.rs`
- - `exchange_malloc`: `liballoc/heap.rs`
- - `box_free`: `liballoc/heap.rs`
-- Operands
- - `not`: `libcore/ops/bit.rs`
- - `bitand`: `libcore/ops/bit.rs`
- - `bitor`: `libcore/ops/bit.rs`
- - `bitxor`: `libcore/ops/bit.rs`
- - `shl`: `libcore/ops/bit.rs`
- - `shr`: `libcore/ops/bit.rs`
- - `bitand_assign`: `libcore/ops/bit.rs`
- - `bitor_assign`: `libcore/ops/bit.rs`
- - `bitxor_assign`: `libcore/ops/bit.rs`
- - `shl_assign`: `libcore/ops/bit.rs`
- - `shr_assign`: `libcore/ops/bit.rs`
- - `deref`: `libcore/ops/deref.rs`
- - `deref_mut`: `libcore/ops/deref.rs`
- - `index`: `libcore/ops/index.rs`
- - `index_mut`: `libcore/ops/index.rs`
- - `add`: `libcore/ops/arith.rs`
- - `sub`: `libcore/ops/arith.rs`
- - `mul`: `libcore/ops/arith.rs`
- - `div`: `libcore/ops/arith.rs`
- - `rem`: `libcore/ops/arith.rs`
- - `neg`: `libcore/ops/arith.rs`
- - `add_assign`: `libcore/ops/arith.rs`
- - `sub_assign`: `libcore/ops/arith.rs`
- - `mul_assign`: `libcore/ops/arith.rs`
- - `div_assign`: `libcore/ops/arith.rs`
- - `rem_assign`: `libcore/ops/arith.rs`
- - `eq`: `libcore/cmp.rs`
- - `ord`: `libcore/cmp.rs`
-- Functions
- - `fn`: `libcore/ops/function.rs`
- - `fn_mut`: `libcore/ops/function.rs`
- - `fn_once`: `libcore/ops/function.rs`
- - `generator_state`: `libcore/ops/generator.rs`
- - `generator`: `libcore/ops/generator.rs`
-- Other
- - `coerce_unsized`: `libcore/ops/unsize.rs`
- - `drop`: `libcore/ops/drop.rs`
- - `drop_in_place`: `libcore/ptr.rs`
- - `clone`: `libcore/clone.rs`
- - `copy`: `libcore/marker.rs`
- - `send`: `libcore/marker.rs`
- - `sized`: `libcore/marker.rs`
- - `unsize`: `libcore/marker.rs`
- - `sync`: `libcore/marker.rs`
- - `phantom_data`: `libcore/marker.rs`
- - `discriminant_kind`: `libcore/marker.rs`
- - `freeze`: `libcore/marker.rs`
- - `debug_trait`: `libcore/fmt/mod.rs`
- - `non_zero`: `libcore/nonzero.rs`
- - `arc`: `liballoc/sync.rs`
- - `rc`: `liballoc/rc.rs`
+------------------------
"##,
},
Lint {
- label: "libstd_sys_internals",
- description: r##"# `libstd_sys_internals`
+ label: "liballoc_internals",
+ description: r##"# `liballoc_internals`
-This feature is internal to the Rust compiler and is not intended for general use.
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
------------------------
"##,
},
Lint {
- label: "libstd_thread_internals",
- description: r##"# `libstd_thread_internals`
+ label: "libstd_sys_internals",
+ description: r##"# `libstd_sys_internals`
This feature is internal to the Rust compiler and is not intended for general use.
@@ -3072,197 +6059,124 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "llvm_asm",
- description: r##"# `llvm_asm`
+ label: "link_llvm_intrinsics",
+ description: r##"# `link_llvm_intrinsics`
-The tracking issue for this feature is: [#70173]
+The tracking issue for this feature is: [#29602]
-[#70173]: https://github.com/rust-lang/rust/issues/70173
+[#29602]: https://github.com/rust-lang/rust/issues/29602
------------------------
+"##,
+ },
+ Lint {
+ label: "linkage",
+ description: r##"# `linkage`
-For extremely low-level manipulations and performance reasons, one
-might wish to control the CPU directly. Rust supports using inline
-assembly to do this via the `llvm_asm!` macro.
-
-```rust,ignore (pseudo-code)
-llvm_asm!(assembly template
- : output operands
- : input operands
- : clobbers
- : options
- );
-```
-
-Any use of `llvm_asm` is feature gated (requires `#![feature(llvm_asm)]` on the
-crate to allow) and of course requires an `unsafe` block.
-
-> **Note**: the examples here are given in x86/x86-64 assembly, but
-> all platforms are supported.
-
-## Assembly template
+The tracking issue for this feature is: [#29603]
-The `assembly template` is the only required parameter and must be a
-literal string (i.e. `""`)
+[#29603]: https://github.com/rust-lang/rust/issues/29603
-```rust
-#![feature(llvm_asm)]
+------------------------
+"##,
+ },
+ Lint {
+ label: "linked_list_cursors",
+ description: r##"# `linked_list_cursors`
-#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-fn foo() {
- unsafe {
- llvm_asm!("NOP");
- }
-}
+The tracking issue for this feature is: [#58533]
-// Other platforms:
-#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-fn foo() { /* ... */ }
+[#58533]: https://github.com/rust-lang/rust/issues/58533
-fn main() {
- // ...
- foo();
- // ...
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "linked_list_remove",
+ description: r##"# `linked_list_remove`
-(The `feature(llvm_asm)` and `#[cfg]`s are omitted from now on.)
+The tracking issue for this feature is: [#69210]
-Output operands, input operands, clobbers and options are all optional
-but you must add the right number of `:` if you skip them:
+[#69210]: https://github.com/rust-lang/rust/issues/69210
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# fn main() { unsafe {
-llvm_asm!("xor %eax, %eax"
- :
- :
- : "eax"
- );
-# } }
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn main() {}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "lint_reasons",
+ description: r##"# `lint_reasons`
-Whitespace also doesn't matter:
+The tracking issue for this feature is: [#54503]
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# fn main() { unsafe {
-llvm_asm!("xor %eax, %eax" ::: "eax");
-# } }
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn main() {}
-```
+[#54503]: https://github.com/rust-lang/rust/issues/54503
-## Operands
+------------------------
+"##,
+ },
+ Lint {
+ label: "linux_pidfd",
+ description: r##"# `linux_pidfd`
-Input and output operands follow the same format: `:
-"constraints1"(expr1), "constraints2"(expr2), ..."`. Output operand
-expressions must be mutable place, or not yet assigned:
+The tracking issue for this feature is: [#82971]
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-fn add(a: i32, b: i32) -> i32 {
- let c: i32;
- unsafe {
- llvm_asm!("add $2, $0"
- : "=r"(c)
- : "0"(a), "r"(b)
- );
- }
- c
-}
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn add(a: i32, b: i32) -> i32 { a + b }
+[#82971]: https://github.com/rust-lang/rust/issues/82971
-fn main() {
- assert_eq!(add(3, 14159), 14162)
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "log_syntax",
+ description: r##"# `log_syntax`
-If you would like to use real operands in this position, however,
-you are required to put curly braces `{}` around the register that
-you want, and you are required to put the specific size of the
-operand. This is useful for very low level programming, where
-which register you use is important:
+The tracking issue for this feature is: [#29598]
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# unsafe fn read_byte_in(port: u16) -> u8 {
-let result: u8;
-llvm_asm!("in %dx, %al" : "={al}"(result) : "{dx}"(port));
-result
-# }
-```
+[#29598]: https://github.com/rust-lang/rust/issues/29598
-## Clobbers
+------------------------
+"##,
+ },
+ Lint {
+ label: "macro_metavar_expr",
+ description: r##"# `macro_metavar_expr`
-Some instructions modify registers which might otherwise have held
-different values so we use the clobbers list to indicate to the
-compiler not to assume any values loaded into those registers will
-stay valid.
+The tracking issue for this feature is: [#83527]
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# fn main() { unsafe {
-// Put the value 0x200 in eax:
-llvm_asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "eax");
-# } }
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn main() {}
-```
+[#83527]: https://github.com/rust-lang/rust/issues/83527
-Input and output registers need not be listed since that information
-is already communicated by the given constraints. Otherwise, any other
-registers used either implicitly or explicitly should be listed.
+------------------------
+"##,
+ },
+ Lint {
+ label: "map_entry_replace",
+ description: r##"# `map_entry_replace`
-If the assembly changes the condition code register `cc` should be
-specified as one of the clobbers. Similarly, if the assembly modifies
-memory, `memory` should also be specified.
+The tracking issue for this feature is: [#44286]
-## Options
+[#44286]: https://github.com/rust-lang/rust/issues/44286
-The last section, `options` is specific to Rust. The format is comma
-separated literal strings (i.e. `:"foo", "bar", "baz"`). It's used to
-specify some extra info about the inline assembly:
+------------------------
+"##,
+ },
+ Lint {
+ label: "map_many_mut",
+ description: r##"# `map_many_mut`
-Current valid options are:
+The tracking issue for this feature is: [#97601]
-1. `volatile` - specifying this is analogous to
- `__asm__ __volatile__ (...)` in gcc/clang.
-2. `alignstack` - certain instructions expect the stack to be
- aligned a certain way (i.e. SSE) and specifying this indicates to
- the compiler to insert its usual stack alignment code
-3. `intel` - use intel syntax instead of the default AT&T.
+[#97601]: https://github.com/rust-lang/rust/issues/97601
-```rust
-# #![feature(llvm_asm)]
-# #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
-# fn main() {
-let result: i32;
-unsafe {
- llvm_asm!("mov eax, 2" : "={eax}"(result) : : : "intel")
-}
-println!("eax is currently {}", result);
-# }
-# #[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
-# fn main() {}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "map_try_insert",
+ description: r##"# `map_try_insert`
-## More Information
+The tracking issue for this feature is: [#82766]
-The current implementation of the `llvm_asm!` macro is a direct binding to [LLVM's
-inline assembler expressions][llvm-docs], so be sure to check out [their
-documentation as well][llvm-docs] for more information about clobbers,
-constraints, etc.
+[#82766]: https://github.com/rust-lang/rust/issues/82766
-[llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions
+------------------------
"##,
},
Lint {
@@ -3305,6 +6219,116 @@ feature, which applied to all empty traits (without needing an opt-in).
"##,
},
Lint {
+ label: "maybe_uninit_array_assume_init",
+ description: r##"# `maybe_uninit_array_assume_init`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_as_bytes",
+ description: r##"# `maybe_uninit_as_bytes`
+
+The tracking issue for this feature is: [#93092]
+
+[#93092]: https://github.com/rust-lang/rust/issues/93092
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_slice",
+ description: r##"# `maybe_uninit_slice`
+
+The tracking issue for this feature is: [#63569]
+
+[#63569]: https://github.com/rust-lang/rust/issues/63569
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_uninit_array",
+ description: r##"# `maybe_uninit_uninit_array`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_uninit_array_transpose",
+ description: r##"# `maybe_uninit_uninit_array_transpose`
+
+The tracking issue for this feature is: [#96097]
+
+[#96097]: https://github.com/rust-lang/rust/issues/96097
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "maybe_uninit_write_slice",
+ description: r##"# `maybe_uninit_write_slice`
+
+The tracking issue for this feature is: [#79995]
+
+[#79995]: https://github.com/rust-lang/rust/issues/79995
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "mem_copy_fn",
+ description: r##"# `mem_copy_fn`
+
+The tracking issue for this feature is: [#98262]
+
+[#98262]: https://github.com/rust-lang/rust/issues/98262
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "min_specialization",
+ description: r##"# `min_specialization`
+
+The tracking issue for this feature is: [#31844]
+
+[#31844]: https://github.com/rust-lang/rust/issues/31844
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "mips_target_feature",
+ description: r##"# `mips_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "more_float_constants",
+ description: r##"# `more_float_constants`
+
+The tracking issue for this feature is: [#103883]
+
+[#103883]: https://github.com/rust-lang/rust/issues/103883
+
+------------------------
+"##,
+ },
+ Lint {
label: "more_qualified_paths",
description: r##"# `more_qualified_paths`
@@ -3338,68 +6362,61 @@ impl A for Foo {
"##,
},
Lint {
- label: "native_link_modifiers",
- description: r##"# `native_link_modifiers`
+ label: "multiple_supertrait_upcastable",
+ description: r##"# `multiple_supertrait_upcastable`
-The tracking issue for this feature is: [#81490]
-
-[#81490]: https://github.com/rust-lang/rust/issues/81490
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
------------------------
-
-The `native_link_modifiers` feature allows you to use the `modifiers` syntax with the `#[link(..)]` attribute.
-
-Modifiers are specified as a comma-delimited string with each modifier prefixed with either a `+` or `-` to indicate that the modifier is enabled or disabled, respectively. The last boolean value specified for a given modifier wins.
"##,
},
Lint {
- label: "native_link_modifiers_as_needed",
- description: r##"# `native_link_modifiers_as_needed`
+ label: "must_not_suspend",
+ description: r##"# `must_not_suspend`
-The tracking issue for this feature is: [#81490]
+The tracking issue for this feature is: [#83310]
-[#81490]: https://github.com/rust-lang/rust/issues/81490
+[#83310]: https://github.com/rust-lang/rust/issues/83310
------------------------
-
-The `native_link_modifiers_as_needed` feature allows you to use the `as-needed` modifier.
-
-`as-needed` is only compatible with the `dynamic` and `framework` linking kinds. Using any other kind will result in a compiler error.
-
-`+as-needed` means that the library will be actually linked only if it satisfies some undefined symbols at the point at which it is specified on the command line, making it similar to static libraries in this regard.
-
-This modifier translates to `--as-needed` for ld-like linkers, and to `-dead_strip_dylibs` / `-needed_library` / `-needed_framework` for ld64.
-The modifier does nothing for linkers that don't support it (e.g. `link.exe`).
-
-The default for this modifier is unclear, some targets currently specify it as `+as-needed`, some do not. We may want to try making `+as-needed` a default for all targets.
"##,
},
Lint {
- label: "native_link_modifiers_bundle",
- description: r##"# `native_link_modifiers_bundle`
+ label: "mutex_unlock",
+ description: r##"# `mutex_unlock`
-The tracking issue for this feature is: [#81490]
+The tracking issue for this feature is: [#81872]
-[#81490]: https://github.com/rust-lang/rust/issues/81490
+[#81872]: https://github.com/rust-lang/rust/issues/81872
------------------------
+"##,
+ },
+ Lint {
+ label: "mutex_unpoison",
+ description: r##"# `mutex_unpoison`
-The `native_link_modifiers_bundle` feature allows you to use the `bundle` modifier.
+The tracking issue for this feature is: [#96469]
-Only compatible with the `static` linking kind. Using any other kind will result in a compiler error.
+[#96469]: https://github.com/rust-lang/rust/issues/96469
-`+bundle` means objects from the static library are bundled into the produced crate (a rlib, for example) and are used from this crate later during linking of the final binary.
+------------------------
+"##,
+ },
+ Lint {
+ label: "naked_functions",
+ description: r##"# `naked_functions`
-`-bundle` means the static library is included into the produced rlib "by name" and object files from it are included only during linking of the final binary, the file search by that name is also performed during final linking.
+The tracking issue for this feature is: [#32408]
-This modifier is supposed to supersede the `static-nobundle` linking kind defined by [RFC 1717](https://github.com/rust-lang/rfcs/pull/1717).
+[#32408]: https://github.com/rust-lang/rust/issues/32408
-The default for this modifier is currently `+bundle`, but it could be changed later on some future edition boundary.
+------------------------
"##,
},
Lint {
- label: "native_link_modifiers_verbatim",
- description: r##"# `native_link_modifiers_verbatim`
+ label: "native_link_modifiers_as_needed",
+ description: r##"# `native_link_modifiers_as_needed`
The tracking issue for this feature is: [#81490]
@@ -3407,40 +6424,36 @@ The tracking issue for this feature is: [#81490]
------------------------
-The `native_link_modifiers_verbatim` feature allows you to use the `verbatim` modifier.
+The `native_link_modifiers_as_needed` feature allows you to use the `as-needed` modifier.
-`+verbatim` means that rustc itself won't add any target-specified library prefixes or suffixes (like `lib` or `.a`) to the library name, and will try its best to ask for the same thing from the linker.
+`as-needed` is only compatible with the `dynamic` and `framework` linking kinds. Using any other kind will result in a compiler error.
-For `ld`-like linkers rustc will use the `-l:filename` syntax (note the colon) when passing the library, so the linker won't add any prefixes or suffixes as well.
-See [`-l namespec`](https://sourceware.org/binutils/docs/ld/Options.html) in ld documentation for more details.
-For linkers not supporting any verbatim modifiers (e.g. `link.exe` or `ld64`) the library name will be passed as is.
+`+as-needed` means that the library will be actually linked only if it satisfies some undefined symbols at the point at which it is specified on the command line, making it similar to static libraries in this regard.
-The default for this modifier is `-verbatim`.
+This modifier translates to `--as-needed` for ld-like linkers, and to `-dead_strip_dylibs` / `-needed_library` / `-needed_framework` for ld64.
+The modifier does nothing for linkers that don't support it (e.g. `link.exe`).
-This RFC changes the behavior of `raw-dylib` linking kind specified by [RFC 2627](https://github.com/rust-lang/rfcs/pull/2627). The `.dll` suffix (or other target-specified suffixes for other targets) is now added automatically.
-If your DLL doesn't have the `.dll` suffix, it can be specified with `+verbatim`.
+The default for this modifier is unclear, some targets currently specify it as `+as-needed`, some do not. We may want to try making `+as-needed` a default for all targets.
"##,
},
Lint {
- label: "native_link_modifiers_whole_archive",
- description: r##"# `native_link_modifiers_whole_archive`
+ label: "needs_panic_runtime",
+ description: r##"# `needs_panic_runtime`
-The tracking issue for this feature is: [#81490]
+The tracking issue for this feature is: [#32837]
-[#81490]: https://github.com/rust-lang/rust/issues/81490
+[#32837]: https://github.com/rust-lang/rust/issues/32837
------------------------
+"##,
+ },
+ Lint {
+ label: "negative_bounds",
+ description: r##"# `negative_bounds`
-The `native_link_modifiers_whole_archive` feature allows you to use the `whole-archive` modifier.
-
-Only compatible with the `static` linking kind. Using any other kind will result in a compiler error.
-
-`+whole-archive` means that the static library is linked as a whole archive without throwing any object files away.
-
-This modifier translates to `--whole-archive` for `ld`-like linkers, to `/WHOLEARCHIVE` for `link.exe`, and to `-force_load` for `ld64`.
-The modifier does nothing for linkers that don't support it.
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
-The default for this modifier is `-whole-archive`.
+------------------------
"##,
},
Lint {
@@ -3505,37 +6518,47 @@ This serves two purposes:
"##,
},
Lint {
- label: "coverage",
- description: r##"# `coverage`
+ label: "never_type",
+ description: r##"# `never_type`
-The tracking issue for this feature is: [#84605]
+The tracking issue for this feature is: [#35121]
-[#84605]: https://github.com/rust-lang/rust/issues/84605
+[#35121]: https://github.com/rust-lang/rust/issues/35121
----
+------------------------
+"##,
+ },
+ Lint {
+ label: "never_type_fallback",
+ description: r##"# `never_type_fallback`
-The `coverage` attribute can be used to selectively disable coverage
-instrumentation in an annotated function. This might be useful to:
+The tracking issue for this feature is: [#65992]
-- Avoid instrumentation overhead in a performance critical function
-- Avoid generating coverage for a function that is not meant to be executed,
- but still target 100% coverage for the rest of the program.
+[#65992]: https://github.com/rust-lang/rust/issues/65992
-## Example
+------------------------
+"##,
+ },
+ Lint {
+ label: "new_uninit",
+ description: r##"# `new_uninit`
-```rust
-#![feature(coverage)]
+The tracking issue for this feature is: [#63291]
-// `foo()` will get coverage instrumentation (by default)
-fn foo() {
- // ...
-}
+[#63291]: https://github.com/rust-lang/rust/issues/63291
-#[coverage(off)]
-fn bar() {
- // ...
-}
-```
+------------------------
+"##,
+ },
+ Lint {
+ label: "no_core",
+ description: r##"# `no_core`
+
+The tracking issue for this feature is: [#29639]
+
+[#29639]: https://github.com/rust-lang/rust/issues/29639
+
+------------------------
"##,
},
Lint {
@@ -3572,6 +6595,319 @@ fn foo() {
"##,
},
Lint {
+ label: "non_exhaustive_omitted_patterns_lint",
+ description: r##"# `non_exhaustive_omitted_patterns_lint`
+
+The tracking issue for this feature is: [#89554]
+
+[#89554]: https://github.com/rust-lang/rust/issues/89554
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "non_lifetime_binders",
+ description: r##"# `non_lifetime_binders`
+
+The tracking issue for this feature is: [#108185]
+
+[#108185]: https://github.com/rust-lang/rust/issues/108185
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "nonzero_ops",
+ description: r##"# `nonzero_ops`
+
+The tracking issue for this feature is: [#84186]
+
+[#84186]: https://github.com/rust-lang/rust/issues/84186
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "noop_waker",
+ description: r##"# `noop_waker`
+
+The tracking issue for this feature is: [#98286]
+
+[#98286]: https://github.com/rust-lang/rust/issues/98286
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "num_midpoint",
+ description: r##"# `num_midpoint`
+
+The tracking issue for this feature is: [#110840]
+
+[#110840]: https://github.com/rust-lang/rust/issues/110840
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "numfmt",
+ description: r##"# `numfmt`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "object_safe_for_dispatch",
+ description: r##"# `object_safe_for_dispatch`
+
+The tracking issue for this feature is: [#43561]
+
+[#43561]: https://github.com/rust-lang/rust/issues/43561
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "offset_of",
+ description: r##"# `offset_of`
+
+The tracking issue for this feature is: [#106655]
+
+[#106655]: https://github.com/rust-lang/rust/issues/106655
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "omit_gdb_pretty_printer_section",
+ description: r##"# `omit_gdb_pretty_printer_section`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "once_cell_try",
+ description: r##"# `once_cell_try`
+
+The tracking issue for this feature is: [#109737]
+
+[#109737]: https://github.com/rust-lang/rust/issues/109737
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "one_sided_range",
+ description: r##"# `one_sided_range`
+
+The tracking issue for this feature is: [#69780]
+
+[#69780]: https://github.com/rust-lang/rust/issues/69780
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "optimize_attribute",
+ description: r##"# `optimize_attribute`
+
+The tracking issue for this feature is: [#54882]
+
+[#54882]: https://github.com/rust-lang/rust/issues/54882
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "option_get_or_insert_default",
+ description: r##"# `option_get_or_insert_default`
+
+The tracking issue for this feature is: [#82901]
+
+[#82901]: https://github.com/rust-lang/rust/issues/82901
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "option_take_if",
+ description: r##"# `option_take_if`
+
+The tracking issue for this feature is: [#98934]
+
+[#98934]: https://github.com/rust-lang/rust/issues/98934
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "option_zip",
+ description: r##"# `option_zip`
+
+The tracking issue for this feature is: [#70086]
+
+[#70086]: https://github.com/rust-lang/rust/issues/70086
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_abort",
+ description: r##"# `panic_abort`
+
+The tracking issue for this feature is: [#32837]
+
+[#32837]: https://github.com/rust-lang/rust/issues/32837
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_always_abort",
+ description: r##"# `panic_always_abort`
+
+The tracking issue for this feature is: [#84438]
+
+[#84438]: https://github.com/rust-lang/rust/issues/84438
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_backtrace_config",
+ description: r##"# `panic_backtrace_config`
+
+The tracking issue for this feature is: [#93346]
+
+[#93346]: https://github.com/rust-lang/rust/issues/93346
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_can_unwind",
+ description: r##"# `panic_can_unwind`
+
+The tracking issue for this feature is: [#92988]
+
+[#92988]: https://github.com/rust-lang/rust/issues/92988
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_info_message",
+ description: r##"# `panic_info_message`
+
+The tracking issue for this feature is: [#66745]
+
+[#66745]: https://github.com/rust-lang/rust/issues/66745
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_internals",
+ description: r##"# `panic_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_runtime",
+ description: r##"# `panic_runtime`
+
+The tracking issue for this feature is: [#32837]
+
+[#32837]: https://github.com/rust-lang/rust/issues/32837
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_unwind",
+ description: r##"# `panic_unwind`
+
+The tracking issue for this feature is: [#32837]
+
+[#32837]: https://github.com/rust-lang/rust/issues/32837
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "panic_update_hook",
+ description: r##"# `panic_update_hook`
+
+The tracking issue for this feature is: [#92649]
+
+[#92649]: https://github.com/rust-lang/rust/issues/92649
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "path_file_prefix",
+ description: r##"# `path_file_prefix`
+
+The tracking issue for this feature is: [#86319]
+
+[#86319]: https://github.com/rust-lang/rust/issues/86319
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pattern",
+ description: r##"# `pattern`
+
+The tracking issue for this feature is: [#27721]
+
+[#27721]: https://github.com/rust-lang/rust/issues/27721
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "peer_credentials_unix_socket",
+ description: r##"# `peer_credentials_unix_socket`
+
+The tracking issue for this feature is: [#42839]
+
+[#42839]: https://github.com/rust-lang/rust/issues/42839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pin_deref_mut",
+ description: r##"# `pin_deref_mut`
+
+The tracking issue for this feature is: [#86918]
+
+[#86918]: https://github.com/rust-lang/rust/issues/86918
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "platform_intrinsics",
+ description: r##"# `platform_intrinsics`
+
+The tracking issue for this feature is: [#27731]
+
+[#27731]: https://github.com/rust-lang/rust/issues/27731
+
+------------------------
+"##,
+ },
+ Lint {
label: "plugin",
description: r##"# `plugin`
@@ -3608,24 +6944,24 @@ of a library.
Plugins can extend [Rust's lint
infrastructure](../../reference/attributes/diagnostics.md#lint-check-attributes) with
additional checks for code style, safety, etc. Now let's write a plugin
-[`lint-plugin-test.rs`](https://github.com/rust-lang/rust/blob/master/tests/ui-fulldeps/auxiliary/lint-plugin-test.rs)
+[`lint-plugin-test.rs`](https://github.com/rust-lang/rust/blob/master/tests/ui-fulldeps/plugin/auxiliary/lint-plugin-test.rs)
that warns about any item named `lintme`.
```rust,ignore (requires-stage-2)
-#![feature(box_syntax, rustc_private)]
+#![feature(rustc_private)]
extern crate rustc_ast;
// Load rustc as a plugin to get macros
extern crate rustc_driver;
-#[macro_use]
extern crate rustc_lint;
#[macro_use]
extern crate rustc_session;
-use rustc_driver::plugin::Registry;
-use rustc_lint::{EarlyContext, EarlyLintPass, LintArray, LintContext, LintPass};
use rustc_ast::ast;
+use rustc_driver::plugin::Registry;
+use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
+
declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'");
declare_lint_pass!(Pass => [TEST_LINT]);
@@ -3633,9 +6969,7 @@ declare_lint_pass!(Pass => [TEST_LINT]);
impl EarlyLintPass for Pass {
fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
if it.ident.name.as_str() == "lintme" {
- cx.lint(TEST_LINT, |lint| {
- lint.build("item is named 'lintme'").set_span(it.span).emit()
- });
+ cx.lint(TEST_LINT, "item is named 'lintme'", |lint| lint.set_span(it.span));
}
}
}
@@ -3643,7 +6977,7 @@ impl EarlyLintPass for Pass {
#[no_mangle]
fn __rustc_plugin_registrar(reg: &mut Registry) {
reg.lint_store.register_lints(&[&TEST_LINT]);
- reg.lint_store.register_early_pass(|| box Pass);
+ reg.lint_store.register_early_pass(|| Box::new(Pass));
}
```
@@ -3677,7 +7011,7 @@ The components of a lint plugin are:
Lint passes are syntax traversals, but they run at a late stage of compilation
where type information is available. `rustc`'s [built-in
-lints](https://github.com/rust-lang/rust/blob/master/src/librustc_session/lint/builtin.rs)
+lints](https://github.com/rust-lang/rust/blob/master/compiler/rustc_lint_defs/src/builtin.rs)
mostly use the same infrastructure as lint plugins, and provide examples of how
to access type information.
@@ -3692,6 +7026,88 @@ including those provided by plugins loaded by `foo.rs`.
"##,
},
Lint {
+ label: "pointer_byte_offsets",
+ description: r##"# `pointer_byte_offsets`
+
+The tracking issue for this feature is: [#96283]
+
+[#96283]: https://github.com/rust-lang/rust/issues/96283
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pointer_is_aligned",
+ description: r##"# `pointer_is_aligned`
+
+The tracking issue for this feature is: [#96284]
+
+[#96284]: https://github.com/rust-lang/rust/issues/96284
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pointer_like_trait",
+ description: r##"# `pointer_like_trait`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "portable_simd",
+ description: r##"# `portable_simd`
+
+The tracking issue for this feature is: [#86656]
+
+[#86656]: https://github.com/rust-lang/rust/issues/86656
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "powerpc_target_feature",
+ description: r##"# `powerpc_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "precise_pointer_size_matching",
+ description: r##"# `precise_pointer_size_matching`
+
+The tracking issue for this feature is: [#56354]
+
+[#56354]: https://github.com/rust-lang/rust/issues/56354
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "prelude_2024",
+ description: r##"# `prelude_2024`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "prelude_import",
+ description: r##"# `prelude_import`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "print_internals",
description: r##"# `print_internals`
@@ -3701,6 +7117,123 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "proc_macro_byte_character",
+ description: r##"# `proc_macro_byte_character`
+
+The tracking issue for this feature is: [#115268]
+
+[#115268]: https://github.com/rust-lang/rust/issues/115268
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_def_site",
+ description: r##"# `proc_macro_def_site`
+
+The tracking issue for this feature is: [#54724]
+
+[#54724]: https://github.com/rust-lang/rust/issues/54724
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_diagnostic",
+ description: r##"# `proc_macro_diagnostic`
+
+The tracking issue for this feature is: [#54140]
+
+[#54140]: https://github.com/rust-lang/rust/issues/54140
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_expand",
+ description: r##"# `proc_macro_expand`
+
+The tracking issue for this feature is: [#90765]
+
+[#90765]: https://github.com/rust-lang/rust/issues/90765
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_hygiene",
+ description: r##"# `proc_macro_hygiene`
+
+The tracking issue for this feature is: [#54727]
+
+[#54727]: https://github.com/rust-lang/rust/issues/54727
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_internals",
+ description: r##"# `proc_macro_internals`
+
+The tracking issue for this feature is: [#27812]
+
+[#27812]: https://github.com/rust-lang/rust/issues/27812
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_quote",
+ description: r##"# `proc_macro_quote`
+
+The tracking issue for this feature is: [#54722]
+
+[#54722]: https://github.com/rust-lang/rust/issues/54722
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_span",
+ description: r##"# `proc_macro_span`
+
+The tracking issue for this feature is: [#54725]
+
+[#54725]: https://github.com/rust-lang/rust/issues/54725
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "proc_macro_tracked_env",
+ description: r##"# `proc_macro_tracked_env`
+
+The tracking issue for this feature is: [#99515]
+
+[#99515]: https://github.com/rust-lang/rust/issues/99515
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "process_exitcode_internals",
+ description: r##"# `process_exitcode_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "process_internals",
+ description: r##"# `process_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
label: "profiler_runtime",
description: r##"# `profiler_runtime`
@@ -3719,41 +7252,204 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
- label: "raw_dylib",
- description: r##"# `raw_dylib`
+ label: "ptr_addr_eq",
+ description: r##"# `ptr_addr_eq`
-The tracking issue for this feature is: [#58713]
+The tracking issue for this feature is: [#116324]
-[#58713]: https://github.com/rust-lang/rust/issues/58713
+[#116324]: https://github.com/rust-lang/rust/issues/116324
------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_alignment_type",
+ description: r##"# `ptr_alignment_type`
-The `raw_dylib` feature allows you to link against the implementations of functions in an `extern`
-block without, on Windows, linking against an import library.
+The tracking issue for this feature is: [#102070]
-```rust,ignore (partial-example)
-#![feature(raw_dylib)]
+[#102070]: https://github.com/rust-lang/rust/issues/102070
-#[link(name="library", kind="raw-dylib")]
-extern {
- fn extern_function(x: i32);
-}
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_as_uninit",
+ description: r##"# `ptr_as_uninit`
-fn main() {
- unsafe {
- extern_function(14);
- }
-}
-```
+The tracking issue for this feature is: [#75402]
+
+[#75402]: https://github.com/rust-lang/rust/issues/75402
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_from_ref",
+ description: r##"# `ptr_from_ref`
+
+The tracking issue for this feature is: [#106116]
+
+[#106116]: https://github.com/rust-lang/rust/issues/106116
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_internals",
+ description: r##"# `ptr_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_mask",
+ description: r##"# `ptr_mask`
+
+The tracking issue for this feature is: [#98290]
+
+[#98290]: https://github.com/rust-lang/rust/issues/98290
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_metadata",
+ description: r##"# `ptr_metadata`
+
+The tracking issue for this feature is: [#81513]
+
+[#81513]: https://github.com/rust-lang/rust/issues/81513
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_sub_ptr",
+ description: r##"# `ptr_sub_ptr`
+
+The tracking issue for this feature is: [#95892]
+
+[#95892]: https://github.com/rust-lang/rust/issues/95892
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ptr_to_from_bits",
+ description: r##"# `ptr_to_from_bits`
+
+The tracking issue for this feature is: [#91126]
+
+[#91126]: https://github.com/rust-lang/rust/issues/91126
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "pub_crate_should_not_need_unstable_attr",
+ description: r##"# `pub_crate_should_not_need_unstable_attr`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_os_error_ty",
+ description: r##"# `raw_os_error_ty`
+
+The tracking issue for this feature is: [#107792]
+
+[#107792]: https://github.com/rust-lang/rust/issues/107792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_os_nonzero",
+ description: r##"# `raw_os_nonzero`
+
+The tracking issue for this feature is: [#82363]
+
+[#82363]: https://github.com/rust-lang/rust/issues/82363
-## Limitations
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_ref_op",
+ description: r##"# `raw_ref_op`
+
+The tracking issue for this feature is: [#64490]
+
+[#64490]: https://github.com/rust-lang/rust/issues/64490
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_slice_split",
+ description: r##"# `raw_slice_split`
+
+The tracking issue for this feature is: [#95595]
-Currently, this feature is only supported on `-windows-msvc` targets. Non-Windows platforms don't have import
-libraries, and an incompatibility between LLVM and the BFD linker means that it is not currently supported on
-`-windows-gnu` targets.
+[#95595]: https://github.com/rust-lang/rust/issues/95595
-On the `i686-pc-windows-msvc` target, this feature supports only the `cdecl`, `stdcall`, `system`, and `fastcall`
-calling conventions.
+------------------------
+"##,
+ },
+ Lint {
+ label: "raw_vec_internals",
+ description: r##"# `raw_vec_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "read_buf",
+ description: r##"# `read_buf`
+
+The tracking issue for this feature is: [#78485]
+
+[#78485]: https://github.com/rust-lang/rust/issues/78485
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "ready_into_inner",
+ description: r##"# `ready_into_inner`
+
+The tracking issue for this feature is: [#101196]
+
+[#101196]: https://github.com/rust-lang/rust/issues/101196
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "receiver_trait",
+ description: r##"# `receiver_trait`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "register_tool",
+ description: r##"# `register_tool`
+
+The tracking issue for this feature is: [#66079]
+
+[#66079]: https://github.com/rust-lang/rust/issues/66079
+
+------------------------
"##,
},
Lint {
@@ -3779,6 +7475,103 @@ enum Foo {
"##,
},
Lint {
+ label: "repr_simd",
+ description: r##"# `repr_simd`
+
+The tracking issue for this feature is: [#27731]
+
+[#27731]: https://github.com/rust-lang/rust/issues/27731
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "restricted_std",
+ description: r##"# `restricted_std`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "result_flattening",
+ description: r##"# `result_flattening`
+
+The tracking issue for this feature is: [#70142]
+
+[#70142]: https://github.com/rust-lang/rust/issues/70142
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "result_option_inspect",
+ description: r##"# `result_option_inspect`
+
+The tracking issue for this feature is: [#91345]
+
+[#91345]: https://github.com/rust-lang/rust/issues/91345
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "return_position_impl_trait_in_trait",
+ description: r##"# `return_position_impl_trait_in_trait`
+
+The tracking issue for this feature is: [#91611]
+
+[#91611]: https://github.com/rust-lang/rust/issues/91611
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "return_type_notation",
+ description: r##"# `return_type_notation`
+
+The tracking issue for this feature is: [#109417]
+
+[#109417]: https://github.com/rust-lang/rust/issues/109417
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "riscv_target_feature",
+ description: r##"# `riscv_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "round_char_boundary",
+ description: r##"# `round_char_boundary`
+
+The tracking issue for this feature is: [#93743]
+
+[#93743]: https://github.com/rust-lang/rust/issues/93743
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "round_ties_even",
+ description: r##"# `round_ties_even`
+
+The tracking issue for this feature is: [#96710]
+
+[#96710]: https://github.com/rust-lang/rust/issues/96710
+
+------------------------
+"##,
+ },
+ Lint {
label: "rt",
description: r##"# `rt`
@@ -3788,6 +7581,39 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "rtm_target_feature",
+ description: r##"# `rtm_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rust_cold_cc",
+ description: r##"# `rust_cold_cc`
+
+The tracking issue for this feature is: [#97544]
+
+[#97544]: https://github.com/rust-lang/rust/issues/97544
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rustc_allow_const_fn_unstable",
+ description: r##"# `rustc_allow_const_fn_unstable`
+
+The tracking issue for this feature is: [#69399]
+
+[#69399]: https://github.com/rust-lang/rust/issues/69399
+
+------------------------
+"##,
+ },
+ Lint {
label: "rustc_attrs",
description: r##"# `rustc_attrs`
@@ -3845,6 +7671,326 @@ error: aborting due to 2 previous errors
"##,
},
Lint {
+ label: "rustc_private",
+ description: r##"# `rustc_private`
+
+The tracking issue for this feature is: [#27812]
+
+[#27812]: https://github.com/rust-lang/rust/issues/27812
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rustdoc_internals",
+ description: r##"# `rustdoc_internals`
+
+The tracking issue for this feature is: [#90418]
+
+[#90418]: https://github.com/rust-lang/rust/issues/90418
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "rustdoc_missing_doc_code_examples",
+ description: r##"# `rustdoc_missing_doc_code_examples`
+
+The tracking issue for this feature is: [#101730]
+
+[#101730]: https://github.com/rust-lang/rust/issues/101730
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sealed",
+ description: r##"# `sealed`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "seek_stream_len",
+ description: r##"# `seek_stream_len`
+
+The tracking issue for this feature is: [#59359]
+
+[#59359]: https://github.com/rust-lang/rust/issues/59359
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "set_ptr_value",
+ description: r##"# `set_ptr_value`
+
+The tracking issue for this feature is: [#75091]
+
+[#75091]: https://github.com/rust-lang/rust/issues/75091
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "setgroups",
+ description: r##"# `setgroups`
+
+The tracking issue for this feature is: [#90747]
+
+[#90747]: https://github.com/rust-lang/rust/issues/90747
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sgx_platform",
+ description: r##"# `sgx_platform`
+
+The tracking issue for this feature is: [#56975]
+
+[#56975]: https://github.com/rust-lang/rust/issues/56975
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "simd_ffi",
+ description: r##"# `simd_ffi`
+
+The tracking issue for this feature is: [#27731]
+
+[#27731]: https://github.com/rust-lang/rust/issues/27731
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sized_type_properties",
+ description: r##"# `sized_type_properties`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_as_chunks",
+ description: r##"# `slice_as_chunks`
+
+The tracking issue for this feature is: [#74985]
+
+[#74985]: https://github.com/rust-lang/rust/issues/74985
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_concat_ext",
+ description: r##"# `slice_concat_ext`
+
+The tracking issue for this feature is: [#27747]
+
+[#27747]: https://github.com/rust-lang/rust/issues/27747
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_concat_trait",
+ description: r##"# `slice_concat_trait`
+
+The tracking issue for this feature is: [#27747]
+
+[#27747]: https://github.com/rust-lang/rust/issues/27747
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_first_last_chunk",
+ description: r##"# `slice_first_last_chunk`
+
+The tracking issue for this feature is: [#111774]
+
+[#111774]: https://github.com/rust-lang/rust/issues/111774
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_flatten",
+ description: r##"# `slice_flatten`
+
+The tracking issue for this feature is: [#95629]
+
+[#95629]: https://github.com/rust-lang/rust/issues/95629
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_from_ptr_range",
+ description: r##"# `slice_from_ptr_range`
+
+The tracking issue for this feature is: [#89792]
+
+[#89792]: https://github.com/rust-lang/rust/issues/89792
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_group_by",
+ description: r##"# `slice_group_by`
+
+The tracking issue for this feature is: [#80552]
+
+[#80552]: https://github.com/rust-lang/rust/issues/80552
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_index_methods",
+ description: r##"# `slice_index_methods`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_internals",
+ description: r##"# `slice_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_iter_mut_as_mut_slice",
+ description: r##"# `slice_iter_mut_as_mut_slice`
+
+The tracking issue for this feature is: [#93079]
+
+[#93079]: https://github.com/rust-lang/rust/issues/93079
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_partition_dedup",
+ description: r##"# `slice_partition_dedup`
+
+The tracking issue for this feature is: [#54279]
+
+[#54279]: https://github.com/rust-lang/rust/issues/54279
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_pattern",
+ description: r##"# `slice_pattern`
+
+The tracking issue for this feature is: [#56345]
+
+[#56345]: https://github.com/rust-lang/rust/issues/56345
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_ptr_get",
+ description: r##"# `slice_ptr_get`
+
+The tracking issue for this feature is: [#74265]
+
+[#74265]: https://github.com/rust-lang/rust/issues/74265
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_ptr_len",
+ description: r##"# `slice_ptr_len`
+
+The tracking issue for this feature is: [#71146]
+
+[#71146]: https://github.com/rust-lang/rust/issues/71146
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_range",
+ description: r##"# `slice_range`
+
+The tracking issue for this feature is: [#76393]
+
+[#76393]: https://github.com/rust-lang/rust/issues/76393
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_split_at_unchecked",
+ description: r##"# `slice_split_at_unchecked`
+
+The tracking issue for this feature is: [#76014]
+
+[#76014]: https://github.com/rust-lang/rust/issues/76014
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_swap_unchecked",
+ description: r##"# `slice_swap_unchecked`
+
+The tracking issue for this feature is: [#88539]
+
+[#88539]: https://github.com/rust-lang/rust/issues/88539
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "slice_take",
+ description: r##"# `slice_take`
+
+The tracking issue for this feature is: [#62280]
+
+[#62280]: https://github.com/rust-lang/rust/issues/62280
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "solid_ext",
+ description: r##"# `solid_ext`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sort_floats",
+ description: r##"# `sort_floats`
+
+The tracking issue for this feature is: [#93396]
+
+[#93396]: https://github.com/rust-lang/rust/issues/93396
+
+------------------------
+"##,
+ },
+ Lint {
label: "sort_internals",
description: r##"# `sort_internals`
@@ -3854,6 +8000,184 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "spec_option_partial_eq",
+ description: r##"# `spec_option_partial_eq`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "specialization",
+ description: r##"# `specialization`
+
+The tracking issue for this feature is: [#31844]
+
+[#31844]: https://github.com/rust-lang/rust/issues/31844
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "split_array",
+ description: r##"# `split_array`
+
+The tracking issue for this feature is: [#90091]
+
+[#90091]: https://github.com/rust-lang/rust/issues/90091
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "split_as_slice",
+ description: r##"# `split_as_slice`
+
+The tracking issue for this feature is: [#96137]
+
+[#96137]: https://github.com/rust-lang/rust/issues/96137
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sse4a_target_feature",
+ description: r##"# `sse4a_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "staged_api",
+ description: r##"# `staged_api`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "start",
+ description: r##"# `start`
+
+The tracking issue for this feature is: [#29633]
+
+[#29633]: https://github.com/rust-lang/rust/issues/29633
+
+------------------------
+
+Allows you to mark a function as the entry point of the executable, which is
+necessary in `#![no_std]` environments.
+
+The function marked `#[start]` is passed the command line parameters in the same
+format as the C main function (aside from the integer types being used).
+It has to be non-generic and have the following signature:
+
+```rust,ignore (only-for-syntax-highlight)
+# let _:
+fn(isize, *const *const u8) -> isize
+# ;
+```
+
+This feature should not be confused with the `start` *lang item* which is
+defined by the `std` crate and is written `#[lang = "start"]`.
+
+## Usage together with the `std` crate
+
+`#[start]` can be used in combination with the `std` crate, in which case the
+normal `main` function (which would get called from the `std` crate) won't be
+used as an entry point.
+The initialization code in `std` will be skipped this way.
+
+Example:
+
+```rust
+#![feature(start)]
+
+#[start]
+fn start(_argc: isize, _argv: *const *const u8) -> isize {
+ 0
+}
+```
+
+Unwinding the stack past the `#[start]` function is currently considered
+Undefined Behavior (for any unwinding implementation):
+
+```rust,ignore (UB)
+#![feature(start)]
+
+#[start]
+fn start(_argc: isize, _argv: *const *const u8) -> isize {
+ std::panic::catch_unwind(|| {
+ panic!(); // panic safely gets caught or safely aborts execution
+ });
+
+ panic!(); // UB!
+
+ 0
+}
+```
+"##,
+ },
+ Lint {
+ label: "std_internals",
+ description: r##"# `std_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "stdio_makes_pipe",
+ description: r##"# `stdio_makes_pipe`
+
+The tracking issue for this feature is: [#98288]
+
+[#98288]: https://github.com/rust-lang/rust/issues/98288
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "stdsimd",
+ description: r##"# `stdsimd`
+
+The tracking issue for this feature is: [#48556]
+
+[#48556]: https://github.com/rust-lang/rust/issues/48556
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "step_trait",
+ description: r##"# `step_trait`
+
+The tracking issue for this feature is: [#42168]
+
+[#42168]: https://github.com/rust-lang/rust/issues/42168
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "stmt_expr_attributes",
+ description: r##"# `stmt_expr_attributes`
+
+The tracking issue for this feature is: [#15701]
+
+[#15701]: https://github.com/rust-lang/rust/issues/15701
+
+------------------------
+"##,
+ },
+ Lint {
label: "str_internals",
description: r##"# `str_internals`
@@ -3863,6 +8187,186 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "str_split_inclusive_remainder",
+ description: r##"# `str_split_inclusive_remainder`
+
+The tracking issue for this feature is: [#77998]
+
+[#77998]: https://github.com/rust-lang/rust/issues/77998
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "str_split_remainder",
+ description: r##"# `str_split_remainder`
+
+The tracking issue for this feature is: [#77998]
+
+[#77998]: https://github.com/rust-lang/rust/issues/77998
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "str_split_whitespace_remainder",
+ description: r##"# `str_split_whitespace_remainder`
+
+The tracking issue for this feature is: [#77998]
+
+[#77998]: https://github.com/rust-lang/rust/issues/77998
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "strict_provenance",
+ description: r##"# `strict_provenance`
+
+The tracking issue for this feature is: [#95228]
+
+[#95228]: https://github.com/rust-lang/rust/issues/95228
+-----
+
+The `strict_provenance` feature allows to enable the `fuzzy_provenance_casts` and `lossy_provenance_casts` lints.
+These lint on casts between integers and pointers, that are recommended against or invalid in the strict provenance model.
+The same feature gate is also used for the experimental strict provenance API in `std` (actually `core`).
+
+## Example
+
+```rust
+#![feature(strict_provenance)]
+#![warn(fuzzy_provenance_casts)]
+
+fn main() {
+ let _dangling = 16_usize as *const u8;
+ //~^ WARNING: strict provenance disallows casting integer `usize` to pointer `*const u8`
+}
+```
+"##,
+ },
+ Lint {
+ label: "strict_provenance_atomic_ptr",
+ description: r##"# `strict_provenance_atomic_ptr`
+
+The tracking issue for this feature is: [#99108]
+
+[#99108]: https://github.com/rust-lang/rust/issues/99108
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "string_deref_patterns",
+ description: r##"# `string_deref_patterns`
+
+The tracking issue for this feature is: [#87121]
+
+[#87121]: https://github.com/rust-lang/rust/issues/87121
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "string_extend_from_within",
+ description: r##"# `string_extend_from_within`
+
+The tracking issue for this feature is: [#103806]
+
+[#103806]: https://github.com/rust-lang/rust/issues/103806
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "string_remove_matches",
+ description: r##"# `string_remove_matches`
+
+The tracking issue for this feature is: [#72826]
+
+[#72826]: https://github.com/rust-lang/rust/issues/72826
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "structural_match",
+ description: r##"# `structural_match`
+
+The tracking issue for this feature is: [#31434]
+
+[#31434]: https://github.com/rust-lang/rust/issues/31434
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "sync_unsafe_cell",
+ description: r##"# `sync_unsafe_cell`
+
+The tracking issue for this feature is: [#95439]
+
+[#95439]: https://github.com/rust-lang/rust/issues/95439
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "target_feature_11",
+ description: r##"# `target_feature_11`
+
+The tracking issue for this feature is: [#69098]
+
+[#69098]: https://github.com/rust-lang/rust/issues/69098
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tbm_target_feature",
+ description: r##"# `tbm_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tcp_linger",
+ description: r##"# `tcp_linger`
+
+The tracking issue for this feature is: [#88494]
+
+[#88494]: https://github.com/rust-lang/rust/issues/88494
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tcp_quickack",
+ description: r##"# `tcp_quickack`
+
+The tracking issue for this feature is: [#96256]
+
+[#96256]: https://github.com/rust-lang/rust/issues/96256
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tcplistener_into_incoming",
+ description: r##"# `tcplistener_into_incoming`
+
+The tracking issue for this feature is: [#88339]
+
+[#88339]: https://github.com/rust-lang/rust/issues/88339
+
+------------------------
+"##,
+ },
+ Lint {
label: "test",
description: r##"# `test`
@@ -4025,6 +8529,57 @@ even when using either of the above.
"##,
},
Lint {
+ label: "test_2018_feature",
+ description: r##"# `test_2018_feature`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "test_unstable_lint",
+ description: r##"# `test_unstable_lint`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "thin_box",
+ description: r##"# `thin_box`
+
+The tracking issue for this feature is: [#92791]
+
+[#92791]: https://github.com/rust-lang/rust/issues/92791
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "thread_id_value",
+ description: r##"# `thread_id_value`
+
+The tracking issue for this feature is: [#67939]
+
+[#67939]: https://github.com/rust-lang/rust/issues/67939
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "thread_local",
+ description: r##"# `thread_local`
+
+The tracking issue for this feature is: [#29594]
+
+[#29594]: https://github.com/rust-lang/rust/issues/29594
+
+------------------------
+"##,
+ },
+ Lint {
label: "thread_local_internals",
description: r##"# `thread_local_internals`
@@ -4034,6 +8589,28 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "thread_sleep_until",
+ description: r##"# `thread_sleep_until`
+
+The tracking issue for this feature is: [#113752]
+
+[#113752]: https://github.com/rust-lang/rust/issues/113752
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "thread_spawn_unchecked",
+ description: r##"# `thread_spawn_unchecked`
+
+The tracking issue for this feature is: [#55132]
+
+[#55132]: https://github.com/rust-lang/rust/issues/55132
+
+------------------------
+"##,
+ },
+ Lint {
label: "trace_macros",
description: r##"# `trace_macros`
@@ -4077,6 +8654,17 @@ note: trace_macro
"##,
},
Lint {
+ label: "track_path",
+ description: r##"# `track_path`
+
+The tracking issue for this feature is: [#99515]
+
+[#99515]: https://github.com/rust-lang/rust/issues/99515
+
+------------------------
+"##,
+ },
+ Lint {
label: "trait_alias",
description: r##"# `trait_alias`
@@ -4146,6 +8734,28 @@ let foo: &dyn Foo = bar;
"##,
},
Lint {
+ label: "transmutability",
+ description: r##"# `transmutability`
+
+The tracking issue for this feature is: [#99571]
+
+[#99571]: https://github.com/rust-lang/rust/issues/99571
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "transmute_generic_consts",
+ description: r##"# `transmute_generic_consts`
+
+The tracking issue for this feature is: [#109929]
+
+[#109929]: https://github.com/rust-lang/rust/issues/109929
+
+------------------------
+"##,
+ },
+ Lint {
label: "transparent_unions",
description: r##"# `transparent_unions`
@@ -4233,6 +8843,59 @@ their application of these optimizations.
"##,
},
Lint {
+ label: "trivial_bounds",
+ description: r##"# `trivial_bounds`
+
+The tracking issue for this feature is: [#48214]
+
+[#48214]: https://github.com/rust-lang/rust/issues/48214
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trusted_len",
+ description: r##"# `trusted_len`
+
+The tracking issue for this feature is: [#37572]
+
+[#37572]: https://github.com/rust-lang/rust/issues/37572
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trusted_len_next_unchecked",
+ description: r##"# `trusted_len_next_unchecked`
+
+The tracking issue for this feature is: [#37572]
+
+[#37572]: https://github.com/rust-lang/rust/issues/37572
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trusted_random_access",
+ description: r##"# `trusted_random_access`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "trusted_step",
+ description: r##"# `trusted_step`
+
+The tracking issue for this feature is: [#85731]
+
+[#85731]: https://github.com/rust-lang/rust/issues/85731
+
+------------------------
+"##,
+ },
+ Lint {
label: "try_blocks",
description: r##"# `try_blocks`
@@ -4267,6 +8930,92 @@ assert!(result.is_err());
"##,
},
Lint {
+ label: "try_find",
+ description: r##"# `try_find`
+
+The tracking issue for this feature is: [#63178]
+
+[#63178]: https://github.com/rust-lang/rust/issues/63178
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "try_reserve_kind",
+ description: r##"# `try_reserve_kind`
+
+The tracking issue for this feature is: [#48043]
+
+[#48043]: https://github.com/rust-lang/rust/issues/48043
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "try_trait_v2",
+ description: r##"# `try_trait_v2`
+
+The tracking issue for this feature is: [#84277]
+
+[#84277]: https://github.com/rust-lang/rust/issues/84277
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "try_trait_v2_residual",
+ description: r##"# `try_trait_v2_residual`
+
+The tracking issue for this feature is: [#91285]
+
+[#91285]: https://github.com/rust-lang/rust/issues/91285
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "try_trait_v2_yeet",
+ description: r##"# `try_trait_v2_yeet`
+
+The tracking issue for this feature is: [#96374]
+
+[#96374]: https://github.com/rust-lang/rust/issues/96374
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "tuple_trait",
+ description: r##"# `tuple_trait`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "type_alias_impl_trait",
+ description: r##"# `type_alias_impl_trait`
+
+The tracking issue for this feature is: [#63063]
+
+[#63063]: https://github.com/rust-lang/rust/issues/63063
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "type_ascription",
+ description: r##"# `type_ascription`
+
+The tracking issue for this feature is: [#23416]
+
+[#23416]: https://github.com/rust-lang/rust/issues/23416
+
+------------------------
+"##,
+ },
+ Lint {
label: "type_changing_struct_update",
description: r##"# `type_changing_struct_update`
@@ -4304,6 +9053,39 @@ fn main () {
"##,
},
Lint {
+ label: "type_name_of_val",
+ description: r##"# `type_name_of_val`
+
+The tracking issue for this feature is: [#66359]
+
+[#66359]: https://github.com/rust-lang/rust/issues/66359
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "type_privacy_lints",
+ description: r##"# `type_privacy_lints`
+
+The tracking issue for this feature is: [#48054]
+
+[#48054]: https://github.com/rust-lang/rust/issues/48054
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "uefi_std",
+ description: r##"# `uefi_std`
+
+The tracking issue for this feature is: [#100499]
+
+[#100499]: https://github.com/rust-lang/rust/issues/100499
+
+------------------------
+"##,
+ },
+ Lint {
label: "unboxed_closures",
description: r##"# `unboxed_closures`
@@ -4319,7 +9101,7 @@ The `unboxed_closures` feature allows you to write functions using the `"rust-ca
required for implementing the [`Fn*`] family of traits. `"rust-call"` functions must have
exactly one (non self) argument, a tuple representing the argument list.
-[`Fn*`]: https://doc.rust-lang.org/std/ops/trait.Fn.html
+[`Fn*`]: ../../std/ops/trait.Fn.html
```rust
#![feature(unboxed_closures)]
@@ -4333,6 +9115,200 @@ fn main() {}
"##,
},
Lint {
+ label: "unchecked_math",
+ description: r##"# `unchecked_math`
+
+The tracking issue for this feature is: [#85122]
+
+[#85122]: https://github.com/rust-lang/rust/issues/85122
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unicode_internals",
+ description: r##"# `unicode_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unique_rc_arc",
+ description: r##"# `unique_rc_arc`
+
+The tracking issue for this feature is: [#112566]
+
+[#112566]: https://github.com/rust-lang/rust/issues/112566
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unix_file_vectored_at",
+ description: r##"# `unix_file_vectored_at`
+
+The tracking issue for this feature is: [#89517]
+
+[#89517]: https://github.com/rust-lang/rust/issues/89517
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unix_set_mark",
+ description: r##"# `unix_set_mark`
+
+The tracking issue for this feature is: [#96467]
+
+[#96467]: https://github.com/rust-lang/rust/issues/96467
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unix_sigpipe",
+ description: r##"# `unix_sigpipe`
+
+The tracking issue for this feature is: [#97889]
+
+[#97889]: https://github.com/rust-lang/rust/issues/97889
+
+---
+
+The `#[unix_sigpipe = "..."]` attribute on `fn main()` can be used to specify how libstd shall setup `SIGPIPE` on Unix platforms before invoking `fn main()`. This attribute is ignored on non-Unix targets. There are three variants:
+* `#[unix_sigpipe = "inherit"]`
+* `#[unix_sigpipe = "sig_dfl"]`
+* `#[unix_sigpipe = "sig_ign"]`
+
+## `#[unix_sigpipe = "inherit"]`
+
+Leave `SIGPIPE` untouched before entering `fn main()`. Unless the parent process has changed the default `SIGPIPE` handler from `SIG_DFL` to something else, this will behave the same as `#[unix_sigpipe = "sig_dfl"]`.
+
+## `#[unix_sigpipe = "sig_dfl"]`
+
+Set the `SIGPIPE` handler to `SIG_DFL`. This will result in your program getting killed if it tries to write to a closed pipe. This is normally what you want if your program produces textual output.
+
+### Example
+
+```rust,no_run
+#![feature(unix_sigpipe)]
+#[unix_sigpipe = "sig_dfl"]
+fn main() { loop { println!("hello world"); } }
+```
+
+```bash
+% ./main | head -n 1
+hello world
+```
+
+## `#[unix_sigpipe = "sig_ign"]`
+
+Set the `SIGPIPE` handler to `SIG_IGN` before invoking `fn main()`. This will result in `ErrorKind::BrokenPipe` errors if you program tries to write to a closed pipe. This is normally what you want if you for example write socket servers, socket clients, or pipe peers.
+
+This is what libstd has done by default since 2014. (However, see the note on child processes below.)
+
+### Example
+
+```rust,no_run
+#![feature(unix_sigpipe)]
+#[unix_sigpipe = "sig_ign"]
+fn main() { loop { println!("hello world"); } }
+```
+
+```bash
+% ./main | head -n 1
+hello world
+thread 'main' panicked at 'failed printing to stdout: Broken pipe (os error 32)', library/std/src/io/stdio.rs:1016:9
+note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
+```
+
+### Note on child processes
+
+When spawning child processes, the legacy Rust behavior if `#[unix_sigpipe]` is not specified is to
+reset `SIGPIPE` to `SIG_DFL`.
+
+If `#[unix_sigpipe = "..."]` is specified, no matter what its value is, the signal disposition of
+`SIGPIPE` is no longer reset. This means that the child inherits the parent's `SIGPIPE` behavior.
+"##,
+ },
+ Lint {
+ label: "unix_socket_ancillary_data",
+ description: r##"# `unix_socket_ancillary_data`
+
+The tracking issue for this feature is: [#76915]
+
+[#76915]: https://github.com/rust-lang/rust/issues/76915
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unix_socket_peek",
+ description: r##"# `unix_socket_peek`
+
+The tracking issue for this feature is: [#76923]
+
+[#76923]: https://github.com/rust-lang/rust/issues/76923
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unnamed_fields",
+ description: r##"# `unnamed_fields`
+
+The tracking issue for this feature is: [#49804]
+
+[#49804]: https://github.com/rust-lang/rust/issues/49804
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unsafe_cell_from_mut",
+ description: r##"# `unsafe_cell_from_mut`
+
+The tracking issue for this feature is: [#111645]
+
+[#111645]: https://github.com/rust-lang/rust/issues/111645
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unsafe_pin_internals",
+ description: r##"# `unsafe_pin_internals`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unsize",
+ description: r##"# `unsize`
+
+The tracking issue for this feature is: [#18598]
+
+[#18598]: https://github.com/rust-lang/rust/issues/18598
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "unsized_fn_params",
+ description: r##"# `unsized_fn_params`
+
+The tracking issue for this feature is: [#48055]
+
+[#48055]: https://github.com/rust-lang/rust/issues/48055
+
+------------------------
+"##,
+ },
+ Lint {
label: "unsized_locals",
description: r##"# `unsized_locals`
@@ -4543,6 +9519,17 @@ fn main() {
"##,
},
Lint {
+ label: "unwrap_infallible",
+ description: r##"# `unwrap_infallible`
+
+The tracking issue for this feature is: [#61695]
+
+[#61695]: https://github.com/rust-lang/rust/issues/61695
+
+------------------------
+"##,
+ },
+ Lint {
label: "update_panic_count",
description: r##"# `update_panic_count`
@@ -4552,6 +9539,149 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "used_with_arg",
+ description: r##"# `used_with_arg`
+
+The tracking issue for this feature is: [#93798]
+
+[#93798]: https://github.com/rust-lang/rust/issues/93798
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "utf16_extra",
+ description: r##"# `utf16_extra`
+
+The tracking issue for this feature is: [#94919]
+
+[#94919]: https://github.com/rust-lang/rust/issues/94919
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "utf16_extra_const",
+ description: r##"# `utf16_extra_const`
+
+The tracking issue for this feature is: [#94919]
+
+[#94919]: https://github.com/rust-lang/rust/issues/94919
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "utf8_chunks",
+ description: r##"# `utf8_chunks`
+
+The tracking issue for this feature is: [#99543]
+
+[#99543]: https://github.com/rust-lang/rust/issues/99543
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "variant_count",
+ description: r##"# `variant_count`
+
+The tracking issue for this feature is: [#73662]
+
+[#73662]: https://github.com/rust-lang/rust/issues/73662
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "vec_into_raw_parts",
+ description: r##"# `vec_into_raw_parts`
+
+The tracking issue for this feature is: [#65816]
+
+[#65816]: https://github.com/rust-lang/rust/issues/65816
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "vec_push_within_capacity",
+ description: r##"# `vec_push_within_capacity`
+
+The tracking issue for this feature is: [#100486]
+
+[#100486]: https://github.com/rust-lang/rust/issues/100486
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "vec_split_at_spare",
+ description: r##"# `vec_split_at_spare`
+
+The tracking issue for this feature is: [#81944]
+
+[#81944]: https://github.com/rust-lang/rust/issues/81944
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "waker_getters",
+ description: r##"# `waker_getters`
+
+The tracking issue for this feature is: [#87021]
+
+[#87021]: https://github.com/rust-lang/rust/issues/87021
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wasi_ext",
+ description: r##"# `wasi_ext`
+
+The tracking issue for this feature is: [#71213]
+
+[#71213]: https://github.com/rust-lang/rust/issues/71213
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wasm_abi",
+ description: r##"# `wasm_abi`
+
+The tracking issue for this feature is: [#83788]
+
+[#83788]: https://github.com/rust-lang/rust/issues/83788
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wasm_target_feature",
+ description: r##"# `wasm_target_feature`
+
+The tracking issue for this feature is: [#44839]
+
+[#44839]: https://github.com/rust-lang/rust/issues/44839
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_by_handle",
+ description: r##"# `windows_by_handle`
+
+The tracking issue for this feature is: [#63010]
+
+[#63010]: https://github.com/rust-lang/rust/issues/63010
+
+------------------------
+"##,
+ },
+ Lint {
label: "windows_c",
description: r##"# `windows_c`
@@ -4579,6 +9709,59 @@ This feature is internal to the Rust compiler and is not intended for general us
"##,
},
Lint {
+ label: "windows_process_exit_code_from",
+ description: r##"# `windows_process_exit_code_from`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_process_extensions_async_pipes",
+ description: r##"# `windows_process_extensions_async_pipes`
+
+The tracking issue for this feature is: [#98289]
+
+[#98289]: https://github.com/rust-lang/rust/issues/98289
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_process_extensions_force_quotes",
+ description: r##"# `windows_process_extensions_force_quotes`
+
+The tracking issue for this feature is: [#82227]
+
+[#82227]: https://github.com/rust-lang/rust/issues/82227
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_process_extensions_main_thread_handle",
+ description: r##"# `windows_process_extensions_main_thread_handle`
+
+The tracking issue for this feature is: [#96723]
+
+[#96723]: https://github.com/rust-lang/rust/issues/96723
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "windows_process_extensions_raw_attribute",
+ description: r##"# `windows_process_extensions_raw_attribute`
+
+The tracking issue for this feature is: [#114854]
+
+[#114854]: https://github.com/rust-lang/rust/issues/114854
+
+------------------------
+"##,
+ },
+ Lint {
label: "windows_stdio",
description: r##"# `windows_stdio`
@@ -4587,10 +9770,95 @@ This feature is internal to the Rust compiler and is not intended for general us
------------------------
"##,
},
+ Lint {
+ label: "with_negative_coherence",
+ description: r##"# `with_negative_coherence`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wrapping_int_impl",
+ description: r##"# `wrapping_int_impl`
+
+The tracking issue for this feature is: [#32463]
+
+[#32463]: https://github.com/rust-lang/rust/issues/32463
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "wrapping_next_power_of_two",
+ description: r##"# `wrapping_next_power_of_two`
+
+The tracking issue for this feature is: [#32463]
+
+[#32463]: https://github.com/rust-lang/rust/issues/32463
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "write_all_vectored",
+ description: r##"# `write_all_vectored`
+
+The tracking issue for this feature is: [#70436]
+
+[#70436]: https://github.com/rust-lang/rust/issues/70436
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "yeet_desugar_details",
+ description: r##"# `yeet_desugar_details`
+
+This feature has no tracking issue, and is therefore likely internal to the compiler, not being intended for general use.
+
+------------------------
+"##,
+ },
+ Lint {
+ label: "yeet_expr",
+ description: r##"# `yeet_expr`
+
+The tracking issue for this feature is: [#96373]
+
+[#96373]: https://github.com/rust-lang/rust/issues/96373
+
+------------------------
+
+The `yeet_expr` feature adds support for `do yeet` expressions,
+which can be used to early-exit from a function or `try` block.
+
+These are highly experimental, thus the placeholder syntax.
+
+```rust,edition2021
+#![feature(yeet_expr)]
+
+fn foo() -> Result<String, i32> {
+ do yeet 4;
+}
+assert_eq!(foo(), Err(4));
+
+fn bar() -> Option<String> {
+ do yeet;
+}
+assert_eq!(bar(), None);
+```
+"##,
+ },
];
pub const CLIPPY_LINTS: &[Lint] = &[
Lint {
+ label: "clippy::absolute_paths",
+ description: r##"Checks for usage of items through absolute paths, like `std::env::current_dir`."##,
+ },
+ Lint {
label: "clippy::absurd_extreme_comparisons",
description: r##"Checks for comparisons where one side of the relation is
either the minimum or maximum value for its type and warns if it involves a
@@ -4598,6 +9866,33 @@ case that is always true or always false. Only integer and boolean types are
checked."##,
},
Lint {
+ label: "clippy::alloc_instead_of_core",
+ description: r##"Finds items imported through `alloc` when available through `core`."##,
+ },
+ Lint {
+ label: "clippy::allow_attributes",
+ description: r##"Checks for usage of the `#[allow]` attribute and suggests replacing it with
+the `#[expect]` (See [RFC 2383](https://rust-lang.github.io/rfcs/2383-lint-reasons.html))
+
+The expect attribute is still unstable and requires the `lint_reasons`
+on nightly. It can be enabled by adding `#![feature(lint_reasons)]` to
+the crate root.
+
+This lint only warns outer attributes (`#[allow]`), as inner attributes
+(`#![allow]`) are usually used to enable or disable lints on a global scale."##,
+ },
+ Lint {
+ label: "clippy::allow_attributes_without_reason",
+ description: r##"Checks for attributes that allow lints without a reason.
+
+(This requires the `lint_reasons` feature)"##,
+ },
+ Lint {
+ label: "clippy::almost_complete_range",
+ description: r##"Checks for ranges which almost include the entire range of letters from 'a' to 'z'
+or digits from '0' to '9', but don't because they're a half open range."##,
+ },
+ Lint {
label: "clippy::almost_swapped",
description: r##"Checks for `foo = bar; bar = foo` sequences."##,
},
@@ -4611,22 +9906,50 @@ or
respectively, suggesting to use the predefined constant."##,
},
Lint {
+ label: "clippy::arc_with_non_send_sync",
+ description: r##".
+This lint warns when you use `Arc` with a type that does not implement `Send` or `Sync`."##,
+ },
+ Lint {
+ label: "clippy::arithmetic_side_effects",
+ description: r##"Checks any kind of arithmetic operation of any type.
+
+Operators like `+`, `-`, `*` or `<<` are usually capable of overflowing according to the [Rust
+Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
+or can panic (`/`, `%`).
+
+Known safe built-in types like `Wrapping` or `Saturating`, floats, operations in constant
+environments, allowed types and non-constant operations that won't overflow are ignored."##,
+ },
+ Lint {
label: "clippy::as_conversions",
description: r##"Checks for usage of `as` conversions.
Note that this lint is specialized in linting *every single* use of `as`
regardless of whether good alternatives exist or not.
If you want more precise lints for `as`, please consider using these separate lints:
-`unnecessary_cast`, `cast_lossless/possible_truncation/possible_wrap/precision_loss/sign_loss`,
+`unnecessary_cast`, `cast_lossless/cast_possible_truncation/cast_possible_wrap/cast_precision_loss/cast_sign_loss`,
`fn_to_numeric_cast(_with_truncation)`, `char_lit_as_u8`, `ref_to_mut` and `ptr_as_ptr`.
There is a good explanation the reason why this lint should work in this way and how it is useful
[in this issue](https://github.com/rust-lang/rust-clippy/issues/5122)."##,
},
Lint {
+ label: "clippy::as_ptr_cast_mut",
+ description: r##"Checks for the result of a `&self`-taking `as_ptr` being cast to a mutable pointer"##,
+ },
+ Lint {
+ label: "clippy::as_underscore",
+ description: r##"Checks for the usage of `as _` conversion using inferred type."##,
+ },
+ Lint {
label: "clippy::assertions_on_constants",
description: r##"Checks for `assert!(true)` and `assert!(false)` calls."##,
},
Lint {
+ label: "clippy::assertions_on_result_states",
+ description: r##"Checks for `assert!(r.is_ok())` or `assert!(r.is_err())` calls."##,
+ },
+ Lint {
label: "clippy::assign_op_pattern",
description: r##"Checks for `a = a op b` or `a = b commutative_op a`
patterns."##,
@@ -4641,14 +9964,17 @@ patterns."##,
that can themselves be awaited."##,
},
Lint {
+ label: "clippy::await_holding_invalid_type",
+ description: r##"Allows users to configure types which should not be held across `await`
+suspension points."##,
+ },
+ Lint {
label: "clippy::await_holding_lock",
- description: r##"Checks for calls to await while holding a
-non-async-aware MutexGuard."##,
+ description: r##"Checks for calls to await while holding a non-async-aware MutexGuard."##,
},
Lint {
label: "clippy::await_holding_refcell_ref",
- description: r##"Checks for calls to await while holding a
-`RefCell` `Ref` or `RefMut`."##,
+ description: r##"Checks for calls to await while holding a `RefCell` `Ref` or `RefMut`."##,
},
Lint {
label: "clippy::bad_bit_mask",
@@ -4659,14 +9985,18 @@ The formula for detecting if an expression of the type `_ <bit_op> m
{`!=`, `>=`, `>`, `!=`, `>=`, `>`}) can be determined from the following
table:
-|Comparison |Bit Op|Example |is always|Formula |
-|------------|------|------------|---------|----------------------|
-|`==` or `!=`| `&` |`x & 2 == 3`|`false` |`c & m != c` |
-|`<` or `>=`| `&` |`x & 2 < 3` |`true` |`m < c` |
-|`>` or `<=`| `&` |`x & 1 > 1` |`false` |`m <= c` |
-|`==` or `!=`| `|` |`x | 1 == 0`|`false` |`c | m != c` |
-|`<` or `>=`| `|` |`x | 1 < 1` |`false` |`m >= c` |
-|`<=` or `>` | `|` |`x | 1 > 0` |`true` |`m > c` |"##,
+|Comparison |Bit Op|Example |is always|Formula |
+|------------|------|-------------|---------|----------------------|
+|`==` or `!=`| `&` |`x & 2 == 3` |`false` |`c & m != c` |
+|`<` or `>=`| `&` |`x & 2 < 3` |`true` |`m < c` |
+|`>` or `<=`| `&` |`x & 1 > 1` |`false` |`m <= c` |
+|`==` or `!=`| `\\|` |`x \\| 1 == 0`|`false` |`c \\| m != c` |
+|`<` or `>=`| `\\|` |`x \\| 1 < 1` |`false` |`m >= c` |
+|`<=` or `>` | `\\|` |`x \\| 1 > 0` |`true` |`m > c` |"##,
+ },
+ Lint {
+ label: "clippy::big_endian_bytes",
+ description: r##"Checks for the usage of the `to_be_bytes` method and/or the function `from_be_bytes`."##,
},
Lint {
label: "clippy::bind_instead_of_map",
@@ -4674,11 +10004,6 @@ table:
`_.or_else(|x| Err(y))`."##,
},
Lint {
- label: "clippy::blacklisted_name",
- description: r##"Checks for usage of blacklisted names for variables, such
-as `foo`."##,
- },
- Lint {
label: "clippy::blanket_clippy_restriction_lints",
description: r##"Checks for `warn`/`deny`/`forbid` attributes targeting the whole clippy::restriction category."##,
},
@@ -4698,21 +10023,38 @@ expression, statements or conditions that use closures with blocks."##,
suggest using the variable directly."##,
},
Lint {
+ label: "clippy::bool_to_int_with_if",
+ description: r##"Instead of using an if statement to convert a bool to an int,
+this lint suggests using a `from()` function or an `as` coercion."##,
+ },
+ Lint {
+ label: "clippy::borrow_as_ptr",
+ description: r##"Checks for the usage of `&expr as *const T` or
+`&mut expr as *mut T`, and suggest using `ptr::addr_of` or
+`ptr::addr_of_mut` instead."##,
+ },
+ Lint { label: "clippy::borrow_deref_ref", description: r##"Checks for `&*(&T)`."## },
+ Lint {
label: "clippy::borrow_interior_mutable_const",
description: r##"Checks if `const` items which is interior mutable (e.g.,
contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.) has been borrowed directly."##,
},
Lint {
label: "clippy::borrowed_box",
- description: r##"Checks for use of `&Box<T>` anywhere in the code.
+ description: r##"Checks for usage of `&Box<T>` anywhere in the code.
Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
},
Lint {
label: "clippy::box_collection",
- description: r##"Checks for use of `Box<T>` where T is a collection such as Vec anywhere in the code.
+ description: r##"Checks for usage of `Box<T>` where T is a collection such as Vec anywhere in the code.
Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
},
Lint {
+ label: "clippy::box_default",
+ description: r##"checks for `Box::new(T::default())`, which is better written as
+`Box::<T>::default()`."##,
+ },
+ Lint {
label: "clippy::boxed_local",
description: r##"Checks for usage of `Box<T>` where an unboxed `T` would
work fine."##,
@@ -4727,6 +10069,11 @@ moved out of the blocks."##,
description: r##"Warns if a generic shadows a built-in type."##,
},
Lint {
+ label: "clippy::bytes_count_to_len",
+ description: r##"It checks for `str::bytes().count()` and suggests replacing it with
+`str::len()`."##,
+ },
+ Lint {
label: "clippy::bytes_nth",
description: r##"Checks for the use of `.bytes().nth()`."##,
},
@@ -4741,22 +10088,41 @@ moved out of the blocks."##,
and suggests to use a case-insensitive approach instead."##,
},
Lint {
+ label: "clippy::cast_abs_to_unsigned",
+ description: r##"Checks for usage of the `abs()` method that cast the result to unsigned."##,
+ },
+ Lint {
+ label: "clippy::cast_enum_constructor",
+ description: r##"Checks for casts from an enum tuple constructor to an integer."##,
+ },
+ Lint {
+ label: "clippy::cast_enum_truncation",
+ description: r##"Checks for casts from an enum type to an integral type which will definitely truncate the
+value."##,
+ },
+ Lint {
label: "clippy::cast_lossless",
description: r##"Checks for casts between numerical types that may
be replaced by safe conversion functions."##,
},
Lint {
+ label: "clippy::cast_nan_to_int",
+ description: r##"Checks for a known NaN float being cast to an integer"##,
+ },
+ Lint {
label: "clippy::cast_possible_truncation",
description: r##"Checks for casts between numerical types that may
truncate large values. This is expected behavior, so the cast is `Allow` by
-default."##,
+default. It suggests user either explicitly ignore the lint,
+or use `try_from()` and handle the truncation, default, or panic explicitly."##,
},
Lint {
label: "clippy::cast_possible_wrap",
description: r##"Checks for casts from an unsigned type to a signed type of
-the same size. Performing such a cast is a 'no-op' for the compiler,
-i.e., nothing is changed at the bit level, and the binary representation of
-the value is reinterpreted. This can cause wrapping if the value is too big
+the same size, or possibly smaller due to target dependent integers.
+Performing such a cast is a 'no-op' for the compiler, i.e., nothing is
+changed at the bit level, and the binary representation of the value is
+reinterpreted. This can cause wrapping if the value is too big
for the target signed type. However, the cast works as defined, so this lint
is `Allow` by default."##,
},
@@ -4776,10 +10142,6 @@ or any 64-bit integer to `f64`."##,
from a less-strictly-aligned pointer to a more-strictly-aligned pointer"##,
},
Lint {
- label: "clippy::cast_ref_to_mut",
- description: r##"Checks for casts of `&T` to `&mut T` anywhere in the code."##,
- },
- Lint {
label: "clippy::cast_sign_loss",
description: r##"Checks for casts from a signed to an unsigned numerical
type. In this case, negative values wrap around to large positive values,
@@ -4787,6 +10149,14 @@ which can be quite surprising in practice. However, as the cast works as
defined, this lint is `Allow` by default."##,
},
Lint {
+ label: "clippy::cast_slice_different_sizes",
+ description: r##"Checks for `as` casts between raw pointers to slices with differently sized elements."##,
+ },
+ Lint {
+ label: "clippy::cast_slice_from_raw_parts",
+ description: r##"Checks for a raw slice being cast to a slice pointer"##,
+ },
+ Lint {
label: "clippy::char_lit_as_u8",
description: r##"Checks for expressions where a character literal is cast
to `u8` and suggests using a byte literal instead."##,
@@ -4806,8 +10176,8 @@ if it starts with a given char."##,
description: r##"Checks for explicit bounds checking when casting."##,
},
Lint {
- label: "clippy::clone_double_ref",
- description: r##"Checks for usage of `.clone()` on an `&&T`."##,
+ label: "clippy::clear_with_drain",
+ description: r##"Checks for usage of `.drain(..)` for the sole purpose of clearing a container."##,
},
Lint {
label: "clippy::clone_on_copy",
@@ -4821,10 +10191,9 @@ function syntax instead (e.g., `Rc::clone(foo)`)."##,
},
Lint {
label: "clippy::cloned_instead_of_copied",
- description: r##"Checks for usages of `cloned()` on an `Iterator` or `Option` where
+ description: r##"Checks for usage of `cloned()` on an `Iterator` or `Option` where
`copied()` could be used instead."##,
},
- Lint { label: "clippy::cmp_nan", description: r##"Checks for comparisons to NaN."## },
Lint {
label: "clippy::cmp_null",
description: r##"This lint checks for equality comparisons with `ptr::null`"##,
@@ -4857,6 +10226,15 @@ Note that this lint is not intended to find _all_ cases where nested match patte
cases where merging would most likely make the code more readable."##,
},
Lint {
+ label: "clippy::collapsible_str_replace",
+ description: r##"Checks for consecutive calls to `str::replace` (2 or more)
+that can be collapsed into a single call."##,
+ },
+ Lint {
+ label: "clippy::collection_is_never_read",
+ description: r##"Checks for collections that are never queried."##,
+ },
+ Lint {
label: "clippy::comparison_chain",
description: r##"Checks comparison chains written with `if` that can be
rewritten with `match` and `cmp`."##,
@@ -4872,6 +10250,10 @@ and suggests using `.is_empty()` where applicable."##,
`Iterator`."##,
},
Lint {
+ label: "clippy::crate_in_macro_def",
+ description: r##"Checks for usage of `crate` as opposed to `$crate` in a macro definition."##,
+ },
+ Lint {
label: "clippy::create_dir",
description: r##"Checks usage of `std::fs::create_dir` and suggest using `std::fs::create_dir_all` instead."##,
},
@@ -4879,7 +10261,10 @@ and suggests using `.is_empty()` where applicable."##,
label: "clippy::crosspointer_transmute",
description: r##"Checks for transmutes between a type `T` and `*T`."##,
},
- Lint { label: "clippy::dbg_macro", description: r##"Checks for usage of dbg!() macro."## },
+ Lint {
+ label: "clippy::dbg_macro",
+ description: r##"Checks for usage of the [`dbg!`](https://doc.rust-lang.org/std/macro.dbg.html) macro."##,
+ },
Lint {
label: "clippy::debug_assert_with_mut_call",
description: r##"Checks for function/method calls with a mutable
@@ -4895,6 +10280,15 @@ parameter in `debug_assert!`, `debug_assert_eq!` and `debug_assert_ne!` macros."
mutable (e.g., contains a `Cell`, `Mutex`, `AtomicXxxx`, etc.)."##,
},
Lint {
+ label: "clippy::default_constructed_unit_structs",
+ description: r##"Checks for construction on unit struct using `default`."##,
+ },
+ Lint {
+ label: "clippy::default_instead_of_iter_empty",
+ description: r##"It checks for `std::iter::Empty::default()` and suggests replacing it with
+`std::iter::empty()`."##,
+ },
+ Lint {
label: "clippy::default_numeric_fallback",
description: r##"Checks for usage of unconstrained numeric literals which may cause default numeric fallback in type
inference.
@@ -4910,6 +10304,10 @@ See [RFC0212](https://github.com/rust-lang/rfcs/blob/master/text/0212-restore-in
description: r##"Checks for literal calls to `Default::default()`."##,
},
Lint {
+ label: "clippy::default_union_representation",
+ description: r##"Displays a warning when a union is declared with the default representation (without a `#[repr(C)]` attribute)."##,
+ },
+ Lint {
label: "clippy::deprecated_cfg_attr",
description: r##"Checks for `#[cfg_attr(rustfmt, rustfmt_skip)]` and suggests to replace it
with `#[rustfmt::skip]`."##,
@@ -4924,22 +10322,46 @@ field that is not a valid semantic version."##,
description: r##"Checks for usage of `*&` and `*&mut` in expressions."##,
},
Lint {
+ label: "clippy::deref_by_slicing",
+ description: r##"Checks for slicing expressions which are equivalent to dereferencing the
+value."##,
+ },
+ Lint {
label: "clippy::derivable_impls",
description: r##"Detects manual `std::default::Default` implementations that are identical to a derived implementation."##,
},
Lint {
- label: "clippy::derive_hash_xor_eq",
- description: r##"Checks for deriving `Hash` but implementing `PartialEq`
-explicitly or vice versa."##,
+ label: "clippy::derive_ord_xor_partial_ord",
+ description: r##"Lints against manual `PartialOrd` and `Ord` implementations for types with a derived `Ord`
+or `PartialOrd` implementation."##,
},
Lint {
- label: "clippy::derive_ord_xor_partial_ord",
- description: r##"Checks for deriving `Ord` but implementing `PartialOrd`
-explicitly or vice versa."##,
+ label: "clippy::derive_partial_eq_without_eq",
+ description: r##"Checks for types that derive `PartialEq` and could implement `Eq`."##,
+ },
+ Lint {
+ label: "clippy::derived_hash_with_manual_eq",
+ description: r##"Lints against manual `PartialEq` implementations for types with a derived `Hash`
+implementation."##,
+ },
+ Lint {
+ label: "clippy::disallowed_macros",
+ description: r##"Denies the configured macros in clippy.toml
+
+Note: Even though this lint is warn-by-default, it will only trigger if
+macros are defined in the clippy.toml file."##,
},
Lint {
label: "clippy::disallowed_methods",
- description: r##"Denies the configured methods and functions in clippy.toml"##,
+ description: r##"Denies the configured methods and functions in clippy.toml
+
+Note: Even though this lint is warn-by-default, it will only trigger if
+methods are defined in the clippy.toml file."##,
+ },
+ Lint {
+ label: "clippy::disallowed_names",
+ description: r##"Checks for usage of disallowed names for variables, such
+as `foo`."##,
},
Lint {
label: "clippy::disallowed_script_idents",
@@ -4958,7 +10380,10 @@ See also: [`non_ascii_idents`].
},
Lint {
label: "clippy::disallowed_types",
- description: r##"Denies the configured types in clippy.toml."##,
+ description: r##"Denies the configured types in clippy.toml.
+
+Note: Even though this lint is warn-by-default, it will only trigger if
+types are defined in the clippy.toml file."##,
},
Lint {
label: "clippy::diverging_sub_expression",
@@ -4966,6 +10391,11 @@ See also: [`non_ascii_idents`].
statements."##,
},
Lint {
+ label: "clippy::doc_link_with_quotes",
+ description: r##"Detects the syntax `['foo']` in documentation comments (notice quotes instead of backticks)
+outside of code blocks"##,
+ },
+ Lint {
label: "clippy::doc_markdown",
description: r##"Checks for the presence of `_`, `::` or camel-case words
outside ticks in documentation."##,
@@ -4989,14 +10419,19 @@ marked as `#[must_use]`."##,
description: r##"Checks for unnecessary double parentheses."##,
},
Lint {
- label: "clippy::drop_copy",
- description: r##"Checks for calls to `std::mem::drop` with a value
-that derives the Copy trait"##,
+ label: "clippy::drain_collect",
+ description: r##"Checks for calls to `.drain()` that clear the collection, immediately followed by a call to `.collect()`.
+
+> Collection in this context refers to any type with a `drain` method:
+> `Vec`, `VecDeque`, `BinaryHeap`, `HashSet`,`HashMap`, `String`"##,
},
Lint {
- label: "clippy::drop_ref",
- description: r##"Checks for calls to `std::mem::drop` with a reference
-instead of an owned value."##,
+ label: "clippy::drop_non_drop",
+ description: r##"Checks for calls to `std::mem::drop` with a value that does not implement `Drop`."##,
+ },
+ Lint {
+ label: "clippy::duplicate_mod",
+ description: r##"Checks for files that are included as modules multiple times."##,
},
Lint {
label: "clippy::duplicate_underscore_argument",
@@ -5014,6 +10449,10 @@ from other `Duration` methods."##,
but without a final `else` branch."##,
},
Lint {
+ label: "clippy::empty_drop",
+ description: r##"Checks for empty `Drop` implementations."##,
+ },
+ Lint {
label: "clippy::empty_enum",
description: r##"Checks for `enum`s with no variants.
@@ -5022,11 +10461,19 @@ nightly-only experimental API. Therefore, this lint is only triggered
if the `never_type` is enabled."##,
},
Lint {
+ label: "clippy::empty_line_after_doc_comments",
+ description: r##"Checks for empty lines after documenation comments."##,
+ },
+ Lint {
label: "clippy::empty_line_after_outer_attr",
description: r##"Checks for empty lines after outer attributes"##,
},
Lint { label: "clippy::empty_loop", description: r##"Checks for empty `loop` expressions."## },
Lint {
+ label: "clippy::empty_structs_with_brackets",
+ description: r##"Finds structs without fields (a so-called empty struct) that are declared with brackets."##,
+ },
+ Lint {
label: "clippy::enum_clike_unportable_variant",
description: r##"Checks for C-like enumerations that are
`repr(isize/usize)` and have values that don't fit into an `i32`."##,
@@ -5052,10 +10499,18 @@ bitwise, difference and division binary operators (`==`, `>`, etc., `&&`,
description: r##"Checks for erasing operations, e.g., `x * 0`."##,
},
Lint {
- label: "clippy::eval_order_dependence",
- description: r##"Checks for a read and a write to the same variable where
-whether the read occurs before or after the write depends on the evaluation
-order of sub-expressions."##,
+ label: "clippy::err_expect",
+ description: r##"Checks for `.err().expect()` calls on the `Result` type."##,
+ },
+ Lint {
+ label: "clippy::error_impl_error",
+ description: r##"Checks for types named `Error` that implement `Error`."##,
+ },
+ Lint {
+ label: "clippy::excessive_nesting",
+ description: r##"Checks for blocks which are nested beyond a certain threshold.
+
+Note: Even though this lint is warn-by-default, it will only trigger if a maximum nesting level is defined in the clippy.toml file."##,
},
Lint {
label: "clippy::excessive_precision",
@@ -5072,8 +10527,7 @@ than that supported by the underlying type."##,
},
Lint {
label: "clippy::exit",
- description: r##"`exit()` terminates the program and doesn't provide a
-stack trace."##,
+ description: r##"Detects calls to the `exit()` function which terminates the program."##,
},
Lint {
label: "clippy::expect_fun_call",
@@ -5082,7 +10536,7 @@ etc., and suggests to use `unwrap_or_else` instead"##,
},
Lint {
label: "clippy::expect_used",
- description: r##"Checks for `.expect()` calls on `Option`s and `Result`s."##,
+ description: r##"Checks for `.expect()` or `.expect_err()` calls on `Result`s and `.expect()` call on `Option`s."##,
},
Lint {
label: "clippy::expl_impl_clone_on_copy",
@@ -5090,6 +10544,10 @@ etc., and suggests to use `unwrap_or_else` instead"##,
types."##,
},
Lint {
+ label: "clippy::explicit_auto_deref",
+ description: r##"Checks for dereferencing expressions which would be covered by auto-deref."##,
+ },
+ Lint {
label: "clippy::explicit_counter_loop",
description: r##"Checks `for` loops over slices with an explicit counter
and suggests the use of `.enumerate()`."##,
@@ -5127,6 +10585,10 @@ replaced with `(e)print!()` / `(e)println!()`"##,
anywhere else."##,
},
Lint {
+ label: "clippy::extra_unused_type_parameters",
+ description: r##"Checks for type parameters in generics that are never used anywhere else."##,
+ },
+ Lint {
label: "clippy::fallible_impl_from",
description: r##"Checks for impls of `From<..>` that contain `panic!()` or `unwrap()`"##,
},
@@ -5144,6 +10606,10 @@ with Default::default()."##,
description: r##"Nothing. This lint has been deprecated."##,
},
Lint {
+ label: "clippy::filter_map_bool_then",
+ description: r##"Checks for usage of `bool::then` in `Iterator::filter_map`."##,
+ },
+ Lint {
label: "clippy::filter_map_identity",
description: r##"Checks for usage of `filter_map(|x| x)`."##,
},
@@ -5162,7 +10628,7 @@ with Default::default()."##,
},
Lint {
label: "clippy::flat_map_option",
- description: r##"Checks for usages of `Iterator::flat_map()` where `filter_map()` could be
+ description: r##"Checks for usage of `Iterator::flat_map()` where `filter_map()` could be
used instead."##,
},
Lint { label: "clippy::float_arithmetic", description: r##"Checks for float arithmetic."## },
@@ -5211,18 +10677,12 @@ store address."##,
ignoring either the keys or values."##,
},
Lint {
- label: "clippy::for_loops_over_fallibles",
- description: r##"Checks for `for` loops over `Option` or `Result` values."##,
+ label: "clippy::forget_non_drop",
+ description: r##"Checks for calls to `std::mem::forget` with a value that does not implement `Drop`."##,
},
Lint {
- label: "clippy::forget_copy",
- description: r##"Checks for calls to `std::mem::forget` with a value that
-derives the Copy trait"##,
- },
- Lint {
- label: "clippy::forget_ref",
- description: r##"Checks for calls to `std::mem::forget` with a reference
-instead of an owned value."##,
+ label: "clippy::format_collect",
+ description: r##"Checks for usage of `.map(|_| format!(..)).collect::<String>()`."##,
},
Lint {
label: "clippy::format_in_format_args",
@@ -5231,6 +10691,15 @@ formatting such as `format!` itself, `write!` or `println!`. Suggests
inlining the `format!` call."##,
},
Lint {
+ label: "clippy::format_push_string",
+ description: r##"Detects cases where the result of a `format!` call is
+appended to an existing `String`."##,
+ },
+ Lint {
+ label: "clippy::four_forward_slashes",
+ description: r##"Checks for outer doc comments written with 4 forward slashes (`////`)."##,
+ },
+ Lint {
label: "clippy::from_iter_instead_of_collect",
description: r##"Checks for `from_iter()` function calls on types that implement the `FromIterator`
trait."##,
@@ -5240,6 +10709,10 @@ trait."##,
description: r##"Searches for implementations of the `Into<..>` trait and suggests to implement `From<..>` instead."##,
},
Lint {
+ label: "clippy::from_raw_with_void_ptr",
+ description: r##"Checks if we're passing a `c_void` raw pointer to `{Box,Rc,Arc,Weak}::from_raw(_)`"##,
+ },
+ Lint {
label: "clippy::from_str_radix_10",
description: r##"Checks for function invocations of the form `primitive::from_str_radix(s, 10)`"##,
},
@@ -5251,16 +10724,25 @@ used by library authors (public and internal) that target an audience where
multithreaded executors are likely to be used for running these Futures."##,
},
Lint {
+ label: "clippy::get_first",
+ description: r##"Checks for usage of `x.get(0)` instead of
+`x.first()`."##,
+ },
+ Lint {
label: "clippy::get_last_with_len",
- description: r##"Checks for using `x.get(x.len() - 1)` instead of
+ description: r##"Checks for usage of `x.get(x.len() - 1)` instead of
`x.last()`."##,
},
Lint {
label: "clippy::get_unwrap",
- description: r##"Checks for use of `.get().unwrap()` (or
+ description: r##"Checks for usage of `.get().unwrap()` (or
`.get_mut().unwrap`) on a standard library type which implements `Index`"##,
},
Lint {
+ label: "clippy::host_endian_bytes",
+ description: r##"Checks for the usage of the `to_ne_bytes` method and/or the function `from_ne_bytes`."##,
+ },
+ Lint {
label: "clippy::identity_op",
description: r##"Checks for identity operations, e.g., `x + 0`."##,
},
@@ -5285,13 +10767,21 @@ and the *else* part."##,
},
Lint {
label: "clippy::if_then_some_else_none",
- description: r##"Checks for if-else that could be written to `bool::then`."##,
+ description: r##"Checks for if-else that could be written using either `bool::then` or `bool::then_some`."##,
},
Lint {
label: "clippy::ifs_same_cond",
description: r##"Checks for consecutive `if`s with the same condition."##,
},
Lint {
+ label: "clippy::ignored_unit_patterns",
+ description: r##"Checks for usage of `_` in patterns of type `()`."##,
+ },
+ Lint {
+ label: "clippy::impl_trait_in_params",
+ description: r##"Lints when `impl Trait` is being used in a function's parameters."##,
+ },
+ Lint {
label: "clippy::implicit_clone",
description: r##"Checks for the usage of `_.to_owned()`, `vec.to_vec()`, or similar when calling `_.clone()` would be clearer."##,
},
@@ -5306,10 +10796,25 @@ algorithm (`SipHash`)."##,
description: r##"Checks for missing return statements at the end of a block."##,
},
Lint {
+ label: "clippy::implicit_saturating_add",
+ description: r##"Checks for implicit saturating addition."##,
+ },
+ Lint {
label: "clippy::implicit_saturating_sub",
description: r##"Checks for implicit saturating subtraction."##,
},
Lint {
+ label: "clippy::implied_bounds_in_impls",
+ description: r##"Looks for bounds in `impl Trait` in return position that are implied by other bounds.
+This can happen when a trait is specified that another trait already has as a supertrait
+(e.g. `fn() -> impl Deref + DerefMut<Target = i32>` has an unnecessary `Deref` bound,
+because `Deref` is a supertrait of `DerefMut`)"##,
+ },
+ Lint {
+ label: "clippy::impossible_comparisons",
+ description: r##"Checks for double comparisons that can never succeed"##,
+ },
+ Lint {
label: "clippy::imprecise_flops",
description: r##"Looks for floating-point expressions that
can be expressed using built-in methods to improve accuracy
@@ -5343,10 +10848,10 @@ lint on constant `usize` indexing on arrays because that is handled by rustc's `
without changing the outcome. The basic structure can be seen in the
following table:
-|Comparison| Bit Op |Example |equals |
-|----------|---------|-----------|-------|
-|`>` / `<=`|`|` / `^`|`x | 2 > 3`|`x > 3`|
-|`<` / `>=`|`|` / `^`|`x ^ 1 < 4`|`x < 4`|"##,
+|Comparison| Bit Op |Example |equals |
+|----------|----------|------------|-------|
+|`>` / `<=`|`\\|` / `^`|`x \\| 2 > 3`|`x > 3`|
+|`<` / `>=`|`\\|` / `^`|`x ^ 1 < 4` |`x < 4`|"##,
},
Lint {
label: "clippy::inefficient_to_string",
@@ -5371,6 +10876,12 @@ or tuple struct where a `let` will suffice."##,
description: r##"Checks for the definition of inherent methods with a signature of `to_string(&self) -> String` and if the type implementing this method also implements the `Display` trait."##,
},
Lint {
+ label: "clippy::init_numbered_fields",
+ description: r##"Checks for tuple structs initialized with field syntax.
+It will however not lint if a base initializer is present.
+The lint will also ignore code in macros."##,
+ },
+ Lint {
label: "clippy::inline_always",
description: r##"Checks for items annotated with `#[inline(always)]`,
unless the annotated function is empty or simply panics."##,
@@ -5395,16 +10906,6 @@ unless the annotated function is empty or simply panics."##,
label: "clippy::int_plus_one",
description: r##"Checks for usage of `x >= y + 1` or `x - 1 >= y` (and `<=`) in a block"##,
},
- Lint {
- label: "clippy::integer_arithmetic",
- description: r##"Checks for integer arithmetic operations which could overflow or panic.
-
-Specifically, checks for any operators (`+`, `-`, `*`, `<<`, etc) which are capable
-of overflowing according to the [Rust
-Reference](https://doc.rust-lang.org/reference/expressions/operator-expr.html#overflow),
-or which can panic (`/`, `%`). No bounds analysis or sophisticated reasoning is
-attempted."##,
- },
Lint { label: "clippy::integer_division", description: r##"Checks for division of integers"## },
Lint {
label: "clippy::into_iter_on_ref",
@@ -5412,6 +10913,12 @@ attempted."##,
or `iter_mut`."##,
},
Lint {
+ label: "clippy::into_iter_without_iter",
+ description: r##"This is the opposite of the `iter_without_into_iter` lint.
+It looks for `IntoIterator for (&|&mut) Type` implementations without an inherent `iter` or `iter_mut` method
+on the type or on any of the types in its `Deref` chain."##,
+ },
+ Lint {
label: "clippy::invalid_null_ptr_usage",
description: r##"This lint checks for invalid usages of `ptr::null`."##,
},
@@ -5432,10 +10939,20 @@ necessary. Only integer types are checked."##,
description: r##"Checks for invisible Unicode characters in the code."##,
},
Lint {
+ label: "clippy::is_digit_ascii_radix",
+ description: r##"Finds usages of [`char::is_digit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_digit) that
+can be replaced with [`is_ascii_digit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_ascii_digit) or
+[`is_ascii_hexdigit`](https://doc.rust-lang.org/stable/std/primitive.char.html#method.is_ascii_hexdigit)."##,
+ },
+ Lint {
label: "clippy::items_after_statements",
description: r##"Checks for items declared after some statement in a block."##,
},
Lint {
+ label: "clippy::items_after_test_module",
+ description: r##"Triggers if an item is declared after the testing module marked with `#[cfg(test)]`."##,
+ },
+ Lint {
label: "clippy::iter_cloned_collect",
description: r##"Checks for the use of `.cloned().collect()` on slice to
create a `Vec`."##,
@@ -5444,6 +10961,11 @@ create a `Vec`."##,
label: "clippy::iter_count",
description: r##"Checks for the use of `.iter().count()`."##,
},
+ Lint {
+ label: "clippy::iter_kv_map",
+ description: r##"Checks for iterating a map (`HashMap` or `BTreeMap`) and
+ignoring either the keys or values."##,
+ },
Lint { label: "clippy::iter_next_loop", description: r##"Checks for loops on `x.next()`."## },
Lint {
label: "clippy::iter_next_slice",
@@ -5455,7 +10977,7 @@ create a `Vec`."##,
},
Lint {
label: "clippy::iter_nth",
- description: r##"Checks for use of `.iter().nth()` (and the related
+ description: r##"Checks for usage of `.iter().nth()` (and the related
`.iter_mut().nth()`) on standard library types with *O*(1) element access."##,
},
Lint {
@@ -5463,8 +10985,37 @@ create a `Vec`."##,
description: r##"Checks for the use of `iter.nth(0)`."##,
},
Lint {
+ label: "clippy::iter_on_empty_collections",
+ description: r##"Checks for calls to `iter`, `iter_mut` or `into_iter` on empty collections"##,
+ },
+ Lint {
+ label: "clippy::iter_on_single_items",
+ description: r##"Checks for calls to `iter`, `iter_mut` or `into_iter` on collections containing a single item"##,
+ },
+ Lint {
+ label: "clippy::iter_out_of_bounds",
+ description: r##"Looks for iterator combinator calls such as `.take(x)` or `.skip(x)`
+where `x` is greater than the amount of items that an iterator will produce."##,
+ },
+ Lint {
+ label: "clippy::iter_overeager_cloned",
+ description: r##"Checks for usage of `_.cloned().<func>()` where call to `.cloned()` can be postponed."##,
+ },
+ Lint {
label: "clippy::iter_skip_next",
- description: r##"Checks for use of `.skip(x).next()` on iterators."##,
+ description: r##"Checks for usage of `.skip(x).next()` on iterators."##,
+ },
+ Lint {
+ label: "clippy::iter_skip_zero",
+ description: r##"Checks for usage of `.skip(0)` on iterators."##,
+ },
+ Lint {
+ label: "clippy::iter_with_drain",
+ description: r##"Checks for usage of `.drain(..)` on `Vec` and `VecDeque` for iteration."##,
+ },
+ Lint {
+ label: "clippy::iter_without_into_iter",
+ description: r##"Looks for `iter` and `iter_mut` methods without an associated `IntoIterator for (&|&mut) Type` implementation."##,
},
Lint {
label: "clippy::iterator_step_by_zero",
@@ -5492,10 +11043,29 @@ are too large."##,
`enum`s."##,
},
Lint {
+ label: "clippy::large_futures",
+ description: r##"It checks for the size of a `Future` created by `async fn` or `async {}`."##,
+ },
+ Lint {
+ label: "clippy::large_include_file",
+ description: r##"Checks for the inclusion of large files via `include_bytes!()`
+and `include_str!()`"##,
+ },
+ Lint {
label: "clippy::large_stack_arrays",
description: r##"Checks for local arrays that may be too large."##,
},
Lint {
+ label: "clippy::large_stack_frames",
+ description: r##"Checks for functions that use a lot of stack space.
+
+This often happens when constructing a large type, such as an array with a lot of elements,
+or constructing *many* smaller-but-still-large structs, or copying around a lot of large types.
+
+This lint is a more general version of [`large_stack_arrays`](https://rust-lang.github.io/rust-clippy/master/#large_stack_arrays)
+that is intended to look at functions as a whole instead of only individual array expressions inside of a function."##,
+ },
+ Lint {
label: "clippy::large_types_passed_by_value",
description: r##"Checks for functions taking arguments by value, where
the argument type is `Copy` and large enough to be worth considering
@@ -5519,29 +11089,42 @@ just to compare to zero, and suggests using `.is_empty()` where applicable."##,
returned."##,
},
Lint {
- label: "clippy::let_underscore_drop",
- description: r##"Checks for `let _ = <expr>`
-where expr has a type that implements `Drop`"##,
+ label: "clippy::let_underscore_future",
+ description: r##"Checks for `let _ = <expr>` where the resulting type of expr implements `Future`"##,
},
Lint {
label: "clippy::let_underscore_lock",
- description: r##"Checks for `let _ = sync_lock`.
-This supports `mutex` and `rwlock` in `std::sync` and `parking_lot`."##,
+ description: r##"Checks for `let _ = sync_lock`. This supports `mutex` and `rwlock` in
+`parking_lot`. For `std` locks see the `rustc` lint
+[`let_underscore_lock`](https://doc.rust-lang.org/nightly/rustc/lints/listing/deny-by-default.html#let-underscore-lock)"##,
},
Lint {
label: "clippy::let_underscore_must_use",
description: r##"Checks for `let _ = <expr>` where expr is `#[must_use]`"##,
},
+ Lint {
+ label: "clippy::let_underscore_untyped",
+ description: r##"Checks for `let _ = <expr>` without a type annotation, and suggests to either provide one,
+or remove the `let` keyword altogether."##,
+ },
Lint { label: "clippy::let_unit_value", description: r##"Checks for binding a unit value."## },
Lint {
+ label: "clippy::let_with_type_underscore",
+ description: r##"Detects when a variable is declared with an explicit type of `_`."##,
+ },
+ Lint {
+ label: "clippy::lines_filter_map_ok",
+ description: r##"Checks for usage of `lines.filter_map(Result::ok)` or `lines.flat_map(Result::ok)`
+when `lines` has type `std::io::Lines`."##,
+ },
+ Lint {
label: "clippy::linkedlist",
description: r##"Checks for usage of any `LinkedList`, suggesting to use a
`Vec` or a `VecDeque` (formerly called `RingBuf`)."##,
},
Lint {
- label: "clippy::logic_bug",
- description: r##"Checks for boolean expressions that contain terminals that
-can be eliminated."##,
+ label: "clippy::little_endian_bytes",
+ description: r##"Checks for the usage of the `to_le_bytes` method and/or the function `from_le_bytes`."##,
},
Lint {
label: "clippy::lossy_float_literal",
@@ -5565,23 +11148,74 @@ cannot be represented as the underlying type without loss."##,
description: r##"It checks for manual implementations of `async` functions."##,
},
Lint {
+ label: "clippy::manual_bits",
+ description: r##"Checks for usage of `std::mem::size_of::<T>() * 8` when
+`T::BITS` is available."##,
+ },
+ Lint {
+ label: "clippy::manual_clamp",
+ description: r##"Identifies good opportunities for a clamp function from std or core, and suggests using it."##,
+ },
+ Lint {
+ label: "clippy::manual_filter",
+ description: r##"Checks for usage of `match` which could be implemented using `filter`"##,
+ },
+ Lint {
label: "clippy::manual_filter_map",
description: r##"Checks for usage of `_.filter(_).map(_)` that can be written more simply
as `filter_map(_)`."##,
},
Lint {
+ label: "clippy::manual_find",
+ description: r##"Checks for manual implementations of Iterator::find"##,
+ },
+ Lint {
label: "clippy::manual_find_map",
description: r##"Checks for usage of `_.find(_).map(_)` that can be written more simply
as `find_map(_)`."##,
},
Lint {
label: "clippy::manual_flatten",
- description: r##"Check for unnecessary `if let` usage in a for loop
+ description: r##"Checks for unnecessary `if let` usage in a for loop
where only the `Some` or `Ok` variant of the iterator element is used."##,
},
Lint {
+ label: "clippy::manual_hash_one",
+ description: r##"Checks for cases where [`BuildHasher::hash_one`] can be used.
+
+[`BuildHasher::hash_one`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html#method.hash_one"##,
+ },
+ Lint {
+ label: "clippy::manual_instant_elapsed",
+ description: r##"Lints subtraction between `Instant::now()` and another `Instant`."##,
+ },
+ Lint {
+ label: "clippy::manual_is_ascii_check",
+ description: r##"Suggests to use dedicated built-in methods,
+`is_ascii_(lowercase|uppercase|digit)` for checking on corresponding ascii range"##,
+ },
+ Lint {
+ label: "clippy::manual_is_finite",
+ description: r##"Checks for manual `is_finite` reimplementations
+(i.e., `x != <float>::INFINITY && x != <float>::NEG_INFINITY`)."##,
+ },
+ Lint {
+ label: "clippy::manual_is_infinite",
+ description: r##"Checks for manual `is_infinite` reimplementations
+(i.e., `x == <float>::INFINITY || x == <float>::NEG_INFINITY`)."##,
+ },
+ Lint {
+ label: "clippy::manual_let_else",
+ description: r##"Warn of cases where `let...else` could be used"##,
+ },
+ Lint {
+ label: "clippy::manual_main_separator_str",
+ description: r##"Checks for references on `std::path::MAIN_SEPARATOR.to_string()` used
+to build a `&str`."##,
+ },
+ Lint {
label: "clippy::manual_map",
- description: r##"Checks for usages of `match` which could be implemented using `map`"##,
+ description: r##"Checks for usage of `match` which could be implemented using `map`"##,
},
Lint {
label: "clippy::manual_memcpy",
@@ -5589,6 +11223,10 @@ where only the `Some` or `Ok` variant of the iterator element is used."##,
slices that could be optimized by having a memcpy."##,
},
Lint {
+ label: "clippy::manual_next_back",
+ description: r##"Checks for `.rev().next()` on a `DoubleEndedIterator`"##,
+ },
+ Lint {
label: "clippy::manual_non_exhaustive",
description: r##"Checks for manual implementations of the non-exhaustive pattern."##,
},
@@ -5602,28 +11240,66 @@ slices that could be optimized by having a memcpy."##,
be more readably expressed as `(3..8).contains(x)`."##,
},
Lint {
+ label: "clippy::manual_range_patterns",
+ description: r##"Looks for combined OR patterns that are all contained in a specific range,
+e.g. `6 | 4 | 5 | 9 | 7 | 8` can be rewritten as `4..=9`."##,
+ },
+ Lint {
+ label: "clippy::manual_rem_euclid",
+ description: r##"Checks for an expression like `((x % 4) + 4) % 4` which is a common manual reimplementation
+of `x.rem_euclid(4)`."##,
+ },
+ Lint {
+ label: "clippy::manual_retain",
+ description: r##"Checks for code to be replaced by `.retain()`."##,
+ },
+ Lint {
label: "clippy::manual_saturating_arithmetic",
description: r##"Checks for `.checked_add/sub(x).unwrap_or(MAX/MIN)`."##,
},
Lint {
+ label: "clippy::manual_slice_size_calculation",
+ description: r##"When `a` is `&[T]`, detect `a.len() * size_of::<T>()` and suggest `size_of_val(a)`
+instead."##,
+ },
+ Lint {
label: "clippy::manual_split_once",
- description: r##"Checks for usages of `str::splitn(2, _)`"##,
+ description: r##"Checks for usage of `str::splitn(2, _)`"##,
},
Lint {
label: "clippy::manual_str_repeat",
description: r##"Checks for manual implementations of `str::repeat`"##,
},
Lint {
+ label: "clippy::manual_string_new",
+ description: r##"Checks for usage of `` to create a `String`, such as `.to_string()`, `.to_owned()`,
+`String::from()` and others."##,
+ },
+ Lint {
label: "clippy::manual_strip",
description: r##"Suggests using `strip_{prefix,suffix}` over `str::{starts,ends}_with` and slicing using
the pattern's length."##,
},
- Lint { label: "clippy::manual_swap", description: r##"Checks for manual swapping."## },
+ Lint {
+ label: "clippy::manual_swap",
+ description: r##"Checks for manual swapping.
+
+Note that the lint will not be emitted in const blocks, as the suggestion would not be applicable."##,
+ },
+ Lint {
+ label: "clippy::manual_try_fold",
+ description: r##"Checks for usage of `Iterator::fold` with a type that implements `Try`."##,
+ },
Lint {
label: "clippy::manual_unwrap_or",
description: r##"Finds patterns that reimplement `Option::unwrap_or` or `Result::unwrap_or`."##,
},
Lint {
+ label: "clippy::manual_while_let_some",
+ description: r##"Looks for loops that check for emptiness of a `Vec` in the condition and pop an element
+in the body as a separate operation."##,
+ },
+ Lint {
label: "clippy::many_single_char_names",
description: r##"Checks for too many variables whose name consists of a
single character."##,
@@ -5640,7 +11316,7 @@ and suggests `cloned()` or `copied()` instead"##,
},
Lint {
label: "clippy::map_entry",
- description: r##"Checks for uses of `contains_key` + `insert` on `HashMap`
+ description: r##"Checks for usage of `contains_key` + `insert` on `HashMap`
or `BTreeMap`."##,
},
Lint {
@@ -5695,7 +11371,10 @@ instead. It also checks for `if let &foo = bar` blocks."##,
},
Lint {
label: "clippy::match_same_arms",
- description: r##"Checks for `match` with identical arm bodies."##,
+ description: r##"Checks for `match` with identical arm bodies.
+
+Note: Does not lint on wildcards if the `non_exhaustive_omitted_patterns_lint` feature is
+enabled and disallowed."##,
},
Lint {
label: "clippy::match_single_binding",
@@ -5719,9 +11398,14 @@ and take drastic actions like `panic!`."##,
description: r##"Checks for iteration that may be infinite."##,
},
Lint {
+ label: "clippy::maybe_misused_cfg",
+ description: r##"Checks for `#[cfg(features = ...)]` and suggests to replace it with
+`#[cfg(feature = ...)]`."##,
+ },
+ Lint {
label: "clippy::mem_forget",
description: r##"Checks for usage of `std::mem::forget(t)` where `t` is
-`Drop`."##,
+`Drop` or has a field that implements `Drop`."##,
},
Lint {
label: "clippy::mem_replace_option_with_none",
@@ -5739,6 +11423,13 @@ and take drastic actions like `panic!`."##,
and `mem::replace(&mut _, mem::zeroed())`."##,
},
Lint {
+ label: "clippy::min_ident_chars",
+ description: r##"Checks for idents which comprise of a single letter.
+
+Note: This lint can be very noisy when enabled; it may be desirable to only enable it
+temporarily."##,
+ },
+ Lint {
label: "clippy::min_max",
description: r##"Checks for expressions where `std::cmp::min` and `max` are
used to clamp values, but switched so that the result is constant."##,
@@ -5752,17 +11443,37 @@ used to clamp values, but switched so that the result is constant."##,
description: r##"Checks for cfg attributes having operating systems used in target family position."##,
},
Lint {
+ label: "clippy::mismatching_type_param_order",
+ description: r##"Checks for type parameters which are positioned inconsistently between
+a type definition and impl block. Specifically, a parameter in an impl
+block which has the same name as a parameter in the type def, but is in
+a different place."##,
+ },
+ Lint {
+ label: "clippy::misnamed_getters",
+ description: r##"Checks for getter methods that return a field that doesn't correspond
+to the name of the method, when there is a field's whose name matches that of the method."##,
+ },
+ Lint {
label: "clippy::misrefactored_assign_op",
description: r##"Checks for `a op= a op b` or `a op= b op a` patterns."##,
},
Lint {
+ label: "clippy::missing_assert_message",
+ description: r##"Checks assertions without a custom panic message."##,
+ },
+ Lint {
+ label: "clippy::missing_asserts_for_indexing",
+ description: r##"Checks for repeated slice indexing without asserting beforehand that the length
+is greater than the largest index used to index into the slice."##,
+ },
+ Lint {
label: "clippy::missing_const_for_fn",
description: r##"Suggests the use of `const` in functions and methods where possible."##,
},
Lint {
label: "clippy::missing_docs_in_private_items",
- description: r##"Warns if there is missing doc for any documentable item
-(public or private)."##,
+ description: r##"Warns if there is missing doc for any private documentable item"##,
},
Lint {
label: "clippy::missing_enforced_import_renames",
@@ -5775,6 +11486,10 @@ in the `enforce-import-renames` config option."##,
return a `Result` type and warns if there is no `# Errors` section."##,
},
Lint {
+ label: "clippy::missing_fields_in_debug",
+ description: r##"Checks for manual [`core::fmt::Debug`](https://doc.rust-lang.org/core/fmt/trait.Debug.html) implementations that do not use all fields."##,
+ },
+ Lint {
label: "clippy::missing_inline_in_public_items",
description: r##"It lints if an exported function, method, trait method with default impl,
or trait method impl is not `#[inline]`."##,
@@ -5789,6 +11504,17 @@ may panic and warns if there is no `# Panics` section."##,
description: r##"Checks for the doc comments of publicly visible
unsafe functions and warns if there is no `# Safety` section."##,
},
+ Lint { label: "clippy::missing_spin_loop", description: r##"Checks for empty spin loops"## },
+ Lint {
+ label: "clippy::missing_trait_methods",
+ description: r##"Checks if a provided method is used implicitly by a trait
+implementation. A usage example would be a wrapper where every method
+should perform some operation before delegating to the inner type's
+implementation.
+
+This lint should typically be enabled on a specific trait `impl` item
+rather than globally."##,
+ },
Lint {
label: "clippy::mistyped_literal_suffixes",
description: r##"Warns for mistyped suffix in literals"##,
@@ -5799,8 +11525,14 @@ unsafe functions and warns if there is no `# Safety` section."##,
digits."##,
},
Lint {
+ label: "clippy::mixed_read_write_in_expression",
+ description: r##"Checks for a read and a write to the same variable where
+whether the read occurs before or after the write depends on the evaluation
+order of sub-expressions."##,
+ },
+ Lint {
label: "clippy::mod_module_files",
- description: r##"Checks that module layout uses only self named module files, bans mod.rs files."##,
+ description: r##"Checks that module layout uses only self named module files, bans `mod.rs` files."##,
},
Lint {
label: "clippy::module_inception",
@@ -5818,6 +11550,7 @@ containing module's name."##,
description: r##"Checks for getting the remainder of a division by one or minus
one."##,
},
+ Lint { label: "clippy::multi_assignments", description: r##"Checks for nested assignments."## },
Lint {
label: "clippy::multiple_crate_versions",
description: r##"Checks to see if multiple versions of a crate are being
@@ -5828,6 +11561,10 @@ used."##,
description: r##"Checks for multiple inherent implementations of a struct"##,
},
Lint {
+ label: "clippy::multiple_unsafe_ops_per_block",
+ description: r##"Checks for `unsafe` blocks that contain more than one unsafe operation."##,
+ },
+ Lint {
label: "clippy::must_use_candidate",
description: r##"Checks for public functions that have no
`#[must_use]` attribute, but return something not already marked
@@ -5840,8 +11577,12 @@ unit-returning functions and methods."##,
},
Lint {
label: "clippy::mut_from_ref",
- description: r##"This lint checks for functions that take immutable
-references and return mutable ones."##,
+ description: r##"This lint checks for functions that take immutable references and return
+mutable ones. This will not trigger if no unsafe code exists as there
+are multiple safe functions which will do this transformation
+
+To be on the conservative side, if there's at least one mutable
+reference with the output lifetime, this lint will not trigger."##,
},
Lint {
label: "clippy::mut_mut",
@@ -5861,11 +11602,11 @@ references and return mutable ones."##,
},
Lint {
label: "clippy::mutex_atomic",
- description: r##"Checks for usages of `Mutex<X>` where an atomic will do."##,
+ description: r##"Checks for usage of `Mutex<X>` where an atomic will do."##,
},
Lint {
label: "clippy::mutex_integer",
- description: r##"Checks for usages of `Mutex<X>` where `X` is an integral
+ description: r##"Checks for usage of `Mutex<X>` where `X` is an integral
type."##,
},
Lint { label: "clippy::naive_bytecount", description: r##"Checks for naive byte counts"## },
@@ -5876,7 +11617,7 @@ specify the `Self`-type explicitly"##,
},
Lint {
label: "clippy::needless_bitwise_bool",
- description: r##"Checks for uses of bitwise and/or operators between booleans, where performance may be improved by using
+ description: r##"Checks for usage of bitwise and/or operators between booleans, where performance may be improved by using
a lazy and."##,
},
Lint {
@@ -5885,16 +11626,27 @@ a lazy and."##,
false }` (or vice versa) and suggests using the condition directly."##,
},
Lint {
+ label: "clippy::needless_bool_assign",
+ description: r##"Checks for expressions of the form `if c { x = true } else { x = false }`
+(or vice versa) and suggest assigning the variable directly from the
+condition."##,
+ },
+ Lint {
label: "clippy::needless_borrow",
description: r##"Checks for address of operations (`&`) that are going to
be dereferenced immediately by the compiler."##,
},
Lint {
label: "clippy::needless_borrowed_reference",
- description: r##"Checks for bindings that destructure a reference and borrow the inner
+ description: r##"Checks for bindings that needlessly destructure a reference and borrow the inner
value with `&ref`."##,
},
Lint {
+ label: "clippy::needless_borrows_for_generic_args",
+ description: r##"Checks for borrow operations (`&`) that used as a generic argument to a
+function when the borrowed value could be used."##,
+ },
+ Lint {
label: "clippy::needless_collect",
description: r##"Checks for functions collecting an iterator when collect
is not needed."##,
@@ -5910,12 +11662,17 @@ rearrangement of code can make the code easier to understand."##,
label: "clippy::needless_doctest_main",
description: r##"Checks for `fn main() { .. }` in doctests"##,
},
+ Lint { label: "clippy::needless_else", description: r##"Checks for empty `else` branches."## },
Lint {
label: "clippy::needless_for_each",
description: r##"Checks for usage of `for_each` that would be more simply written as a
`for` loop."##,
},
Lint {
+ label: "clippy::needless_if",
+ description: r##"Checks for empty `if` branches with no else branch."##,
+ },
+ Lint {
label: "clippy::needless_late_init",
description: r##"Checks for late initializations that can be replaced by a `let` statement
with an initializer."##,
@@ -5926,17 +11683,42 @@ with an initializer."##,
relying on lifetime elision."##,
},
Lint {
+ label: "clippy::needless_match",
+ description: r##"Checks for unnecessary `match` or match-like `if let` returns for `Option` and `Result`
+when function signatures are the same."##,
+ },
+ Lint {
label: "clippy::needless_option_as_deref",
- description: r##"Checks for no-op uses of Option::{as_deref,as_deref_mut},
+ description: r##"Checks for no-op uses of `Option::{as_deref, as_deref_mut}`,
for example, `Option<&T>::as_deref()` returns the same type."##,
},
Lint {
+ label: "clippy::needless_option_take",
+ description: r##"Checks for calling `take` function after `as_ref`."##,
+ },
+ Lint {
+ label: "clippy::needless_parens_on_range_literals",
+ description: r##"The lint checks for parenthesis on literals in range statements that are
+superfluous."##,
+ },
+ Lint {
+ label: "clippy::needless_pass_by_ref_mut",
+ description: r##"Check if a `&mut` function argument is actually used mutably.
+
+Be careful if the function is publicly reexported as it would break compatibility with
+users of this function."##,
+ },
+ Lint {
label: "clippy::needless_pass_by_value",
description: r##"Checks for functions taking arguments by value, but not
consuming them in its
body."##,
},
Lint {
+ label: "clippy::needless_pub_self",
+ description: r##"Checks for usage of `pub(self)` and `pub(in self)`."##,
+ },
+ Lint {
label: "clippy::needless_question_mark",
description: r##"Suggests alternatives for useless applications of `?` in terminating expressions"##,
},
@@ -5946,12 +11728,24 @@ body."##,
collection just to get the values by index."##,
},
Lint {
+ label: "clippy::needless_raw_string_hashes",
+ description: r##"Checks for raw string literals with an unnecessary amount of hashes around them."##,
+ },
+ Lint {
+ label: "clippy::needless_raw_strings",
+ description: r##"Checks for raw string literals where a string literal can be used instead."##,
+ },
+ Lint {
label: "clippy::needless_return",
description: r##"Checks for return statements at the end of a block."##,
},
Lint {
+ label: "clippy::needless_return_with_question_mark",
+ description: r##"Checks for return statements on `Err` paired with the `?` operator."##,
+ },
+ Lint {
label: "clippy::needless_splitn",
- description: r##"Checks for usages of `str::splitn` (or `str::rsplitn`) where using `str::split` would be the same."##,
+ description: r##"Checks for usage of `str::splitn` (or `str::rsplitn`) where using `str::split` would be the same."##,
},
Lint {
label: "clippy::needless_update",
@@ -5985,7 +11779,7 @@ This lint is not applied to structs marked with
},
Lint {
label: "clippy::new_without_default",
- description: r##"Checks for types with a `fn new() -> Self` method and no
+ description: r##"Checks for public types with a `pub fn new() -> Self` method and no
implementation of
[`Default`](https://doc.rust-lang.org/std/default/trait.Default.html)."##,
},
@@ -5994,14 +11788,34 @@ implementation of
description: r##"Checks for statements which have no effect."##,
},
Lint {
+ label: "clippy::no_effect_replace",
+ description: r##"Checks for `replace` statements which have no effect."##,
+ },
+ Lint {
label: "clippy::no_effect_underscore_binding",
description: r##"Checks for binding to underscore prefixed variable without side-effects."##,
},
Lint {
+ label: "clippy::no_mangle_with_rust_abi",
+ description: r##"Checks for Rust ABI functions with the `#[no_mangle]` attribute."##,
+ },
+ Lint {
label: "clippy::non_ascii_literal",
description: r##"Checks for non-ASCII characters in string and char literals."##,
},
Lint {
+ label: "clippy::non_canonical_clone_impl",
+ description: r##"Checks for non-canonical implementations of `Clone` when `Copy` is already implemented."##,
+ },
+ Lint {
+ label: "clippy::non_canonical_partial_ord_impl",
+ description: r##"Checks for non-canonical implementations of `PartialOrd` when `Ord` is already implemented."##,
+ },
+ Lint {
+ label: "clippy::non_minimal_cfg",
+ description: r##"Checks for `any` and `all` combinators in `cfg` with only one condition."##,
+ },
+ Lint {
label: "clippy::non_octal_unix_permissions",
description: r##"Checks for non-octal values used to set Unix file permissions."##,
},
@@ -6033,12 +11847,20 @@ that make no sense."##,
arguments but are not marked `unsafe`."##,
},
Lint {
+ label: "clippy::obfuscated_if_else",
+ description: r##"Checks for usage of `.then_some(..).unwrap_or(..)`"##,
+ },
+ Lint {
label: "clippy::octal_escapes",
description: r##"Checks for `\\0` escapes in string and byte literals that look like octal
character escapes in C."##,
},
Lint { label: "clippy::ok_expect", description: r##"Checks for usage of `ok().expect(..)`."## },
Lint {
+ label: "clippy::only_used_in_recursion",
+ description: r##"Checks for arguments that are only used in recursion with no side-effects."##,
+ },
+ Lint {
label: "clippy::op_ref",
description: r##"Checks for arguments to `==` which have their address
taken to satisfy a bound
@@ -6046,7 +11868,7 @@ and suggests to dereference the other argument instead"##,
},
Lint {
label: "clippy::option_as_ref_deref",
- description: r##"Checks for usage of `_.as_ref().map(Deref::deref)` or it's aliases (such as String::as_str)."##,
+ description: r##"Checks for usage of `_.as_ref().map(Deref::deref)` or its aliases (such as String::as_str)."##,
},
Lint {
label: "clippy::option_env_unwrap",
@@ -6059,7 +11881,8 @@ suggests usage of the `env!` macro."##,
},
Lint {
label: "clippy::option_if_let_else",
- description: r##"Lints usage of `if let Some(v) = ... { y } else { x }` which is more
+ description: r##"Lints usage of `if let Some(v) = ... { y } else { x }` and
+`match .. { Some(v) => y, None/_ => x }` which are more
idiomatically done with `Option::map_or` (if the else bit is a pure
expression) or `Option::map_or_else` (if the else bit is an impure
expression)."##,
@@ -6075,14 +11898,19 @@ or closure that returns the unit type `()`."##,
},
Lint {
label: "clippy::option_option",
- description: r##"Checks for use of `Option<Option<_>>` in function signatures and type
+ description: r##"Checks for usage of `Option<Option<_>>` in function signatures and type
definitions"##,
},
Lint {
label: "clippy::or_fun_call",
description: r##"Checks for calls to `.or(foo(..))`, `.unwrap_or(foo(..))`,
-etc., and suggests to use `or_else`, `unwrap_or_else`, etc., or
-`unwrap_or_default` instead."##,
+`.or_insert(foo(..))` etc., and suggests to use `.or_else(|| foo(..))`,
+`.unwrap_or_else(|| foo(..))`, `.unwrap_or_default()` or `.or_default()`
+etc. instead."##,
+ },
+ Lint {
+ label: "clippy::or_then_unwrap",
+ description: r##"Checks for `.or(…).unwrap()` calls to Options and Results."##,
},
Lint {
label: "clippy::out_of_bounds_indexing",
@@ -6093,25 +11921,49 @@ index."##,
label: "clippy::overflow_check_conditional",
description: r##"Detects classic underflow/overflow checks."##,
},
+ Lint {
+ label: "clippy::overly_complex_bool_expr",
+ description: r##"Checks for boolean expressions that contain terminals that
+can be eliminated."##,
+ },
Lint { label: "clippy::panic", description: r##"Checks for usage of `panic!`."## },
Lint {
label: "clippy::panic_in_result_fn",
- description: r##"Checks for usage of `panic!`, `unimplemented!`, `todo!`, `unreachable!` or assertions in a function of type result."##,
+ description: r##"Checks for usage of `panic!` or assertions in a function of type result."##,
},
Lint {
label: "clippy::panicking_unwrap",
description: r##"Checks for calls of `unwrap[_err]()` that will always fail."##,
},
Lint {
+ label: "clippy::partial_pub_fields",
+ description: r##"Checks whether partial fields of a struct are public.
+
+Either make all fields of a type public, or make none of them public"##,
+ },
+ Lint {
label: "clippy::partialeq_ne_impl",
description: r##"Checks for manual re-implementations of `PartialEq::ne`."##,
},
Lint {
+ label: "clippy::partialeq_to_none",
+ description: r##"Checks for binary comparisons to a literal `Option::None`."##,
+ },
+ Lint {
label: "clippy::path_buf_push_overwrite",
description: r##"* Checks for [push](https://doc.rust-lang.org/std/path/struct.PathBuf.html#method.push)
calls on `PathBuf` that can cause overwrites."##,
},
Lint {
+ label: "clippy::path_ends_with_ext",
+ description: r##"Looks for calls to `Path::ends_with` calls where the argument looks like a file extension.
+
+By default, Clippy has a short list of known filenames that start with a dot
+but aren't necessarily file extensions (e.g. the `.git` folder), which are allowed by default.
+The `allowed-dotfiles` configuration can be used to allow additional
+file extensions that Clippy should not lint."##,
+ },
+ Lint {
label: "clippy::pattern_type_mismatch",
description: r##"Checks for patterns that aren't exact representations of the types
they are applied to.
@@ -6133,6 +11985,10 @@ this lint can still be used to highlight areas of interest and ensure a good und
of ownership semantics."##,
},
Lint {
+ label: "clippy::permissions_set_readonly_false",
+ description: r##"Checks for calls to `std::fs::Permissions.set_readonly` with argument `false`."##,
+ },
+ Lint {
label: "clippy::possible_missing_comma",
description: r##"Checks for possible missing comma in an array. It lints if
an array element is a binary operator expression and it lies on two lines."##,
@@ -6148,6 +12004,11 @@ numeric literal)
followed by a method call"##,
},
Lint {
+ label: "clippy::print_in_format_impl",
+ description: r##"Checks for usage of `println`, `print`, `eprintln` or `eprint` in an
+implementation of a formatting trait."##,
+ },
+ Lint {
label: "clippy::print_literal",
description: r##"This lint warns about the use of literals as `print!`/`println!` args."##,
},
@@ -6173,16 +12034,20 @@ print a newline."##,
},
Lint {
label: "clippy::ptr_arg",
- description: r##"This lint checks for function arguments of type `&String`
-or `&Vec` unless the references are mutable. It will also suggest you
-replace `.clone()` calls with the appropriate `.to_owned()`/`to_string()`
-calls."##,
+ description: r##"This lint checks for function arguments of type `&String`, `&Vec`,
+`&PathBuf`, and `Cow<_>`. It will also suggest you replace `.clone()` calls
+with the appropriate `.to_owned()`/`to_string()` calls."##,
},
Lint {
label: "clippy::ptr_as_ptr",
description: r##"Checks for `as` casts between raw pointers without changing its mutability,
namely `*const T` to `*const U` and `*mut T` to `*mut U`."##,
},
+ Lint {
+ label: "clippy::ptr_cast_constness",
+ description: r##"Checks for `as` casts between raw pointers which change its constness, namely `*const T` to
+`*mut T` and `*mut T` to `*const T`."##,
+ },
Lint { label: "clippy::ptr_eq", description: r##"Use `std::ptr::eq` when applicable"## },
Lint {
label: "clippy::ptr_offset_with_cast",
@@ -6193,11 +12058,27 @@ namely `*const T` to `*const U` and `*mut T` to `*mut U`."##,
label: "clippy::pub_enum_variant_names",
description: r##"Nothing. This lint has been deprecated."##,
},
+ Lint { label: "clippy::pub_use", description: r##"Restricts the usage of `pub use ...`"## },
+ Lint {
+ label: "clippy::pub_with_shorthand",
+ description: r##"Checks for usage of `pub(<loc>)` with `in`."##,
+ },
+ Lint {
+ label: "clippy::pub_without_shorthand",
+ description: r##"Checks for usage of `pub(<loc>)` without `in`.
+
+Note: As you cannot write a module's path in `pub(<loc>)`, this will only trigger on
+`pub(super)` and the like."##,
+ },
Lint {
label: "clippy::question_mark",
description: r##"Checks for expressions that could be replaced by the question mark operator."##,
},
Lint {
+ label: "clippy::question_mark_used",
+ description: r##"Checks for expressions that use the question mark operator and rejects them."##,
+ },
+ Lint {
label: "clippy::range_minus_one",
description: r##"Checks for inclusive ranges where 1 is subtracted from
the upper bound, e.g., `x..=(y-1)`."##,
@@ -6220,10 +12101,49 @@ upper bound, e.g., `x..(y+1)`."##,
label: "clippy::rc_buffer",
description: r##"Checks for `Rc<T>` and `Arc<T>` when `T` is a mutable buffer type such as `String` or `Vec`."##,
},
+ Lint {
+ label: "clippy::rc_clone_in_vec_init",
+ description: r##"Checks for reference-counted pointers (`Arc`, `Rc`, `rc::Weak`, and `sync::Weak`)
+in `vec![elem; len]`"##,
+ },
Lint { label: "clippy::rc_mutex", description: r##"Checks for `Rc<Mutex<T>>`."## },
Lint {
+ label: "clippy::read_line_without_trim",
+ description: r##"Looks for calls to [`Stdin::read_line`] to read a line from the standard input
+into a string, then later attempting to parse this string into a type without first trimming it, which will
+always fail because the string has a trailing newline in it."##,
+ },
+ Lint {
+ label: "clippy::read_zero_byte_vec",
+ description: r##"This lint catches reads into a zero-length `Vec`.
+Especially in the case of a call to `with_capacity`, this lint warns that read
+gets the number of bytes from the `Vec`'s length, not its capacity."##,
+ },
+ Lint {
+ label: "clippy::readonly_write_lock",
+ description: r##"Looks for calls to `RwLock::write` where the lock is only used for reading."##,
+ },
+ Lint {
+ label: "clippy::recursive_format_impl",
+ description: r##"Checks for format trait implementations (e.g. `Display`) with a recursive call to itself
+which uses `self` as a parameter.
+This is typically done indirectly with the `write!` macro or with `to_string()`."##,
+ },
+ Lint {
label: "clippy::redundant_allocation",
- description: r##"Checks for use of redundant allocations anywhere in the code."##,
+ description: r##"Checks for usage of redundant allocations anywhere in the code."##,
+ },
+ Lint {
+ label: "clippy::redundant_as_str",
+ description: r##"Checks for usage of `as_str()` on a `String`` chained with a method available on the `String` itself."##,
+ },
+ Lint {
+ label: "clippy::redundant_async_block",
+ description: r##"Checks for `async` block that only returns `await` on a future."##,
+ },
+ Lint {
+ label: "clippy::redundant_at_rest_pattern",
+ description: r##"Checks for `[all @ ..]` patterns."##,
},
Lint {
label: "clippy::redundant_clone",
@@ -6247,6 +12167,10 @@ are defined."##,
argument and can be replaced by referencing the method directly."##,
},
Lint {
+ label: "clippy::redundant_comparisons",
+ description: r##"Checks for ineffective double comparisons against constants."##,
+ },
+ Lint {
label: "clippy::redundant_else",
description: r##"Checks for `else` blocks that can be removed without changing semantics."##,
},
@@ -6260,6 +12184,14 @@ argument and can be replaced by referencing the method directly."##,
could be used."##,
},
Lint {
+ label: "clippy::redundant_guards",
+ description: r##"Checks for unnecessary guards in match expressions."##,
+ },
+ Lint {
+ label: "clippy::redundant_locals",
+ description: r##"Checks for redundant redefinitions of local bindings."##,
+ },
+ Lint {
label: "clippy::redundant_pattern",
description: r##"Checks for patterns in the form `name @ _`."##,
},
@@ -6283,19 +12215,22 @@ do not change the type."##,
description: r##"Checks for constants and statics with an explicit `'static` lifetime."##,
},
Lint {
- label: "clippy::ref_binding_to_reference",
- description: r##"Checks for `ref` bindings which create a reference to a reference."##,
+ label: "clippy::redundant_type_annotations",
+ description: r##"Warns about needless / redundant type annotations."##,
},
Lint {
- label: "clippy::ref_in_deref",
- description: r##"Checks for references in expressions that use
-auto dereference."##,
+ label: "clippy::ref_binding_to_reference",
+ description: r##"Checks for `ref` bindings which create a reference to a reference."##,
},
Lint {
label: "clippy::ref_option_ref",
description: r##"Checks for usage of `&Option<&T>`."##,
},
Lint {
+ label: "clippy::ref_patterns",
+ description: r##"Checks for usages of the `ref` keyword."##,
+ },
+ Lint {
label: "clippy::regex_macro",
description: r##"Nothing. This lint has been deprecated."##,
},
@@ -6314,10 +12249,19 @@ they are equivalent to `1`. (Related discussion in [rust-clippy#7306](https://gi
description: r##"Nothing. This lint has been deprecated."##,
},
Lint {
+ label: "clippy::reserve_after_initialization",
+ description: r##"Informs the user about a more concise way to create a vector with a known capacity."##,
+ },
+ Lint {
label: "clippy::rest_pat_in_fully_bound_structs",
description: r##"Checks for unnecessary '..' pattern binding on struct when all fields are explicitly matched."##,
},
Lint {
+ label: "clippy::result_large_err",
+ description: r##"Checks for functions that return `Result` with an unusually large
+`Err`-variant."##,
+ },
+ Lint {
label: "clippy::result_map_or_into_option",
description: r##"Checks for usage of `_.map_or(None, Some)`."##,
},
@@ -6339,7 +12283,7 @@ implements `std::error::Error`."##,
Lint {
label: "clippy::reversed_empty_ranges",
description: r##"Checks for range expressions `x..y` where both `x` and `y`
-are constant and `x` is greater or equal to `y`."##,
+are constant and `x` is greater to `y`. Also triggers if `x` is equal to `y` when they are conditions to a `for` loop."##,
},
Lint {
label: "clippy::same_functions_in_if_condition",
@@ -6361,6 +12305,16 @@ one from a trait, another not from trait."##,
`position()`, or `rposition()`) followed by a call to `is_some()` or `is_none()`."##,
},
Lint {
+ label: "clippy::seek_from_current",
+ description: r##"Checks an argument of `seek` method of `Seek` trait
+and if it start seek from `SeekFrom::Current(0)`, suggests `stream_position` instead."##,
+ },
+ Lint {
+ label: "clippy::seek_to_start_instead_of_rewind",
+ description: r##"Checks for jumps to the start of a stream that implements `Seek`
+and uses the `seek` method providing `Start` as parameter."##,
+ },
+ Lint {
label: "clippy::self_assignment",
description: r##"Checks for explicit self-assignments."##,
},
@@ -6370,7 +12324,7 @@ one from a trait, another not from trait."##,
},
Lint {
label: "clippy::self_named_module_files",
- description: r##"Checks that module layout uses only mod.rs files."##,
+ description: r##"Checks that module layout uses only `mod.rs` files."##,
},
Lint {
label: "clippy::semicolon_if_nothing_returned",
@@ -6378,6 +12332,15 @@ one from a trait, another not from trait."##,
`()` but is not followed by a semicolon."##,
},
Lint {
+ label: "clippy::semicolon_inside_block",
+ description: r##"Suggests moving the semicolon after a block to the inside of the block, after its last
+expression."##,
+ },
+ Lint {
+ label: "clippy::semicolon_outside_block",
+ description: r##"Suggests moving the semicolon from a block's final expression outside of the block."##,
+ },
+ Lint {
label: "clippy::separated_literal_suffix",
description: r##"Warns if literal suffixes are separated by an underscore.
To enforce separated literal suffix style,
@@ -6421,8 +12384,31 @@ post](http://llogiq.github.io/2015/07/30/traits.html) for further
information) instead of an inherent implementation."##,
},
Lint {
+ label: "clippy::should_panic_without_expect",
+ description: r##"Checks for `#[should_panic]` attributes without specifying the expected panic message."##,
+ },
+ Lint {
+ label: "clippy::significant_drop_in_scrutinee",
+ description: r##"Checks for temporaries returned from function calls in a match scrutinee that have the
+`clippy::has_significant_drop` attribute."##,
+ },
+ Lint {
+ label: "clippy::significant_drop_tightening",
+ description: r##"Searches for elements marked with `#[clippy::has_significant_drop]` that could be early
+dropped but are in fact dropped at the end of their scopes. In other words, enforces the
+tightening of their possible lifetimes."##,
+ },
+ Lint {
label: "clippy::similar_names",
- description: r##"Checks for names that are very similar and thus confusing."##,
+ description: r##"Checks for names that are very similar and thus confusing.
+
+Note: this lint looks for similar names throughout each
+scope. To allow it, you need to allow it on the scope
+level, not on the name that is reported."##,
+ },
+ Lint {
+ label: "clippy::single_call_fn",
+ description: r##"Checks for functions that are only used once. Does not lint tests."##,
},
Lint {
label: "clippy::single_char_add_str",
@@ -6430,6 +12416,11 @@ information) instead of an inherent implementation."##,
where `push`/`insert` with a `char` would work fine."##,
},
Lint {
+ label: "clippy::single_char_lifetime_names",
+ description: r##"Checks for lifetimes with names which are one character
+long."##,
+ },
+ Lint {
label: "clippy::single_char_pattern",
description: r##"Checks for string methods that receive a single-character
`str` as an argument, e.g., `_.split(x)`."##,
@@ -6445,7 +12436,12 @@ where `push`/`insert` with a `char` would work fine."##,
Lint {
label: "clippy::single_match",
description: r##"Checks for matches with a single arm where an `if let`
-will usually suffice."##,
+will usually suffice.
+
+This intentionally does not lint if there are comments
+inside of the other arm, so as to allow the user to document
+why having another explicit pattern with an empty body is necessary,
+or because the comments need to be preserved for other reasons."##,
},
Lint {
label: "clippy::single_match_else",
@@ -6453,12 +12449,21 @@ will usually suffice."##,
usually suffice."##,
},
Lint {
+ label: "clippy::single_range_in_vec_init",
+ description: r##"Checks for `Vec` or array initializations that contain only one range."##,
+ },
+ Lint {
label: "clippy::size_of_in_element_count",
description: r##"Detects expressions where
`size_of::<T>` or `size_of_val::<T>` is used as a
count of elements of type `T`"##,
},
Lint {
+ label: "clippy::size_of_ref",
+ description: r##"Checks for calls to `std::mem::size_of_val()` where the argument is
+a reference to a reference."##,
+ },
+ Lint {
label: "clippy::skip_while_next",
description: r##"Checks for usage of `_.skip_while(condition).next()`."##,
},
@@ -6469,10 +12474,18 @@ count of elements of type `T`"##,
Lint {
label: "clippy::stable_sort_primitive",
description: r##"When sorting primitive values (integers, bools, chars, as well
-as arrays, slices, and tuples of such items), it is better to
+as arrays, slices, and tuples of such items), it is typically better to
use an unstable sort than a stable sort."##,
},
Lint {
+ label: "clippy::std_instead_of_alloc",
+ description: r##"Finds items imported through `std` when available through `alloc`."##,
+ },
+ Lint {
+ label: "clippy::std_instead_of_core",
+ description: r##"Finds items imported through `std` when available through `core`."##,
+ },
+ Lint {
label: "clippy::str_to_string",
description: r##"This lint checks for `.to_string()` method calls on values of type `&str`."##,
},
@@ -6502,6 +12515,10 @@ match."##,
that contain only ASCII characters."##,
},
Lint {
+ label: "clippy::string_lit_chars_any",
+ description: r##"Checks for `<string_lit>.chars().any(|i| i == c)`."##,
+ },
+ Lint {
label: "clippy::string_slice",
description: r##"Checks for slice operations on strings"##,
},
@@ -6532,10 +12549,19 @@ subtracting elements in an Add impl."##,
},
Lint {
label: "clippy::suspicious_assignment_formatting",
- description: r##"Checks for use of the nonexistent `=*`, `=!` and `=-`
+ description: r##"Checks for usage of the non-existent `=*`, `=!` and `=-`
operators."##,
},
Lint {
+ label: "clippy::suspicious_command_arg_space",
+ description: r##"Checks for `Command::arg()` invocations that look like they
+should be multiple arguments instead, such as `arg(-t ext2)`."##,
+ },
+ Lint {
+ label: "clippy::suspicious_doc_comments",
+ description: r##"Detects the use of outer doc comments (`///`, `/**`) followed by a bang (`!`): `///!`"##,
+ },
+ Lint {
label: "clippy::suspicious_else_formatting",
description: r##"Checks for formatting of `else`. It lints if the `else`
is followed immediately by a newline or the `else` seems to be missing."##,
@@ -6562,12 +12588,24 @@ of binary operators nearby."##,
related functions with either zero or one splits."##,
},
Lint {
+ label: "clippy::suspicious_to_owned",
+ description: r##"Checks for the usage of `_.to_owned()`, on a `Cow<'_, _>`."##,
+ },
+ Lint {
label: "clippy::suspicious_unary_op_formatting",
description: r##"Checks the formatting of a unary operator on the right hand side
of a binary operator. It lints if there is no space between the binary and unary operators,
but there is a space between the unary and its operand."##,
},
Lint {
+ label: "clippy::suspicious_xor_used_as_pow",
+ description: r##"Warns for a Bitwise XOR (`^`) operator being probably confused as a powering. It will not trigger if any of the numbers are not in decimal."##,
+ },
+ Lint {
+ label: "clippy::swap_ptr_to_ref",
+ description: r##"Checks for calls to `core::mem::swap` where either parameter is derived from a pointer"##,
+ },
+ Lint {
label: "clippy::tabs_in_doc_comments",
description: r##"Checks doc comments for usage of tab characters."##,
},
@@ -6577,12 +12615,13 @@ but there is a space between the unary and its operand."##,
assign a value in it."##,
},
Lint {
- label: "clippy::to_digit_is_some",
- description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##,
+ label: "clippy::tests_outside_test_module",
+ description: r##"Triggers when a testing function (marked with the `#[test]` attribute) isn't inside a testing module
+(marked with `#[cfg(test)]`)."##,
},
Lint {
- label: "clippy::to_string_in_display",
- description: r##"Checks for uses of `to_string()` in `Display` traits."##,
+ label: "clippy::to_digit_is_some",
+ description: r##"Checks for `.to_digit(..).is_some()` on `char`s."##,
},
Lint {
label: "clippy::to_string_in_format_args",
@@ -6610,7 +12649,7 @@ in a macro that does formatting."##,
},
Lint {
label: "clippy::trait_duplication_in_bounds",
- description: r##"Checks for cases where generics are being used and multiple
+ description: r##"Checks for cases where generics or trait objects are being used and multiple
syntax specifications for trait bounds are used simultaneously."##,
},
Lint {
@@ -6634,6 +12673,15 @@ syntax specifications for trait bounds are used simultaneously."##,
description: r##"Checks for transmutes from an integer to a float."##,
},
Lint {
+ label: "clippy::transmute_int_to_non_zero",
+ description: r##"Checks for transmutes from integers to `NonZero*` types, and suggests their `new_unchecked`
+method instead."##,
+ },
+ Lint {
+ label: "clippy::transmute_null_to_fn",
+ description: r##"Checks for null function pointer creation through transmute."##,
+ },
+ Lint {
label: "clippy::transmute_num_to_bytes",
description: r##"Checks for transmutes from a number to an array of `u8`"##,
},
@@ -6647,6 +12695,11 @@ from a reference to a reference."##,
description: r##"Checks for transmutes from a pointer to a reference."##,
},
Lint {
+ label: "clippy::transmute_undefined_repr",
+ description: r##"Checks for transmutes between types which do not have a representation defined relative to
+each other."##,
+ },
+ Lint {
label: "clippy::transmutes_expressible_as_ptr_casts",
description: r##"Checks for transmutes that could be a pointer cast."##,
},
@@ -6655,6 +12708,10 @@ from a reference to a reference."##,
description: r##"Checks for transmute calls which would receive a null pointer."##,
},
Lint {
+ label: "clippy::trim_split_whitespace",
+ description: r##"Warns about calling `str::trim` (or variants) before `str::split_whitespace`."##,
+ },
+ Lint {
label: "clippy::trivial_regex",
description: r##"Checks for trivial [regex](https://crates.io/crates/regex)
creation (with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`)."##,
@@ -6665,25 +12722,51 @@ creation (with `Regex::new`, `RegexBuilder::new`, or `RegexSet::new`)."##,
the argument type is `Copy` and small enough to be more efficient to always
pass by value."##,
},
- Lint { label: "clippy::try_err", description: r##"Checks for usages of `Err(x)?`."## },
+ Lint { label: "clippy::try_err", description: r##"Checks for usage of `Err(x)?`."## },
+ Lint {
+ label: "clippy::tuple_array_conversions",
+ description: r##"Checks for tuple<=>array conversions that are not done with `.into()`."##,
+ },
Lint {
label: "clippy::type_complexity",
description: r##"Checks for types used in structs, parameters and `let`
declarations above a certain complexity threshold."##,
},
Lint {
+ label: "clippy::type_id_on_box",
+ description: r##"Looks for calls to `<Box<dyn Any> as Any>::type_id`."##,
+ },
+ Lint {
label: "clippy::type_repetition_in_bounds",
description: r##"This lint warns about unnecessary type repetitions in trait bounds"##,
},
Lint {
- label: "clippy::undocumented_unsafe_blocks",
- description: r##"Checks for `unsafe` blocks without a `// Safety: ` comment
-explaining why the unsafe operations performed inside
-the block are safe."##,
+ label: "clippy::unchecked_duration_subtraction",
+ description: r##"Lints subtraction between an [`Instant`] and a [`Duration`]."##,
},
Lint {
- label: "clippy::undropped_manually_drops",
- description: r##"Prevents the safe `std::mem::drop` function from being called on `std::mem::ManuallyDrop`."##,
+ label: "clippy::undocumented_unsafe_blocks",
+ description: r##"Checks for `unsafe` blocks and impls without a `// SAFETY: ` comment
+explaining why the unsafe operations performed inside
+the block are safe.
+
+Note the comment must appear on the line(s) preceding the unsafe block
+with nothing appearing in between. The following is ok:
+```rust
+foo(
+ // SAFETY:
+ // This is a valid safety comment
+ unsafe { *x }
+)
+```
+But neither of these are:
+```rust
+// SAFETY:
+// This is not a valid safety comment
+foo(
+ /* SAFETY: Neither is this */ unsafe { *x },
+);
+```"##,
},
Lint {
label: "clippy::unicode_not_nfc",
@@ -6706,6 +12789,11 @@ This is commonly caused by calling `set_len()` right after allocating or
reserving a buffer with `new()`, `default()`, `with_capacity()`, or `reserve()`."##,
},
Lint {
+ label: "clippy::uninlined_format_args",
+ description: r##"Detect when a variable is not inlined in a format string,
+and suggests to inline it."##,
+ },
+ Lint {
label: "clippy::unit_arg",
description: r##"Checks for passing a unit value as an argument to a function without using a
unit literal (`()`)."##,
@@ -6723,23 +12811,40 @@ Fn(...) -> Ord where the implemented closure returns the unit type.
The lint also suggests to remove the semi-colon at the end of the statement if present."##,
},
Lint {
+ label: "clippy::unnecessary_box_returns",
+ description: r##"Checks for a return type containing a `Box<T>` where `T` implements `Sized`
+
+The lint ignores `Box<T>` where `T` is larger than `unnecessary_box_size`,
+as returning a large `T` directly may be detrimental to performance."##,
+ },
+ Lint {
label: "clippy::unnecessary_cast",
- description: r##"Checks for casts to the same type, casts of int literals to integer types
-and casts of float literals to float types."##,
+ description: r##"Checks for casts to the same type, casts of int literals to integer types, casts of float
+literals to float types and casts between raw pointers without changing type or constness."##,
},
Lint {
label: "clippy::unnecessary_filter_map",
- description: r##"Checks for `filter_map` calls which could be replaced by `filter` or `map`.
+ description: r##"Checks for `filter_map` calls that could be replaced by `filter` or `map`.
More specifically it checks if the closure provided is only performing one of the
filter or map operations and suggests the appropriate option."##,
},
Lint {
+ label: "clippy::unnecessary_find_map",
+ description: r##"Checks for `find_map` calls that could be replaced by `find` or `map`. More
+specifically it checks if the closure provided is only performing one of the
+find or map operations and suggests the appropriate option."##,
+ },
+ Lint {
label: "clippy::unnecessary_fold",
- description: r##"Checks for using `fold` when a more succinct alternative exists.
+ description: r##"Checks for usage of `fold` when a more succinct alternative exists.
Specifically, this checks for `fold`s which could be replaced by `any`, `all`,
`sum` or `product`."##,
},
Lint {
+ label: "clippy::unnecessary_join",
+ description: r##"Checks for usage of `.collect::<Vec<String>>().join()` on iterators."##,
+ },
+ Lint {
label: "clippy::unnecessary_lazy_evaluations",
description: r##"As the counterpart to `or_fun_call`, this lint looks for unnecessary
lazily evaluated closures on `Option` and `Result`.
@@ -6750,7 +12855,16 @@ simpler code:
- `and_then` to `and`
- `or_else` to `or`
- `get_or_insert_with` to `get_or_insert`
- - `ok_or_else` to `ok_or`"##,
+ - `ok_or_else` to `ok_or`
+ - `then` to `then_some` (for msrv >= 1.62.0)"##,
+ },
+ Lint {
+ label: "clippy::unnecessary_literal_unwrap",
+ description: r##"Checks for `.unwrap()` related calls on `Result`s and `Option`s that are constructed."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_map_on_constructor",
+ description: r##"Suggest removing the use of a may (or map_err) method when an Option or Result is being construted."##,
},
Lint {
label: "clippy::unnecessary_mut_passed",
@@ -6763,15 +12877,33 @@ requires an immutable reference."##,
sub-expression."##,
},
Lint {
+ label: "clippy::unnecessary_owned_empty_strings",
+ description: r##"Detects cases of owned empty strings being passed as an argument to a function expecting `&str`"##,
+ },
+ Lint {
+ label: "clippy::unnecessary_safety_comment",
+ description: r##"Checks for `// SAFETY: ` comments on safe code."##,
+ },
+ Lint {
+ label: "clippy::unnecessary_safety_doc",
+ description: r##"Checks for the doc comments of publicly visible
+safe functions and traits and warns if there is a `# Safety` section."##,
+ },
+ Lint {
label: "clippy::unnecessary_self_imports",
description: r##"Checks for imports ending in `::{self}`."##,
},
Lint {
label: "clippy::unnecessary_sort_by",
- description: r##"Detects uses of `Vec::sort_by` passing in a closure
+ description: r##"Checks for usage of `Vec::sort_by` passing in a closure
which compares the two arguments, either directly or indirectly."##,
},
Lint {
+ label: "clippy::unnecessary_struct_initialization",
+ description: r##"Checks for initialization of a `struct` by copying a base without setting
+any field."##,
+ },
+ Lint {
label: "clippy::unnecessary_to_owned",
description: r##"Checks for unnecessary calls to [`ToOwned::to_owned`](https://doc.rust-lang.org/std/borrow/trait.ToOwned.html#tymethod.to_owned)
and other `to_owned`-like functions."##,
@@ -6855,10 +12987,24 @@ types have different ABI, size or alignment."##,
description: r##"Nothing. This lint has been deprecated."##,
},
Lint {
+ label: "clippy::unused_format_specs",
+ description: r##"Detects [formatting parameters] that have no effect on the output of
+`format!()`, `println!()` or similar macros."##,
+ },
+ Lint {
label: "clippy::unused_io_amount",
description: r##"Checks for unused written/read amount."##,
},
Lint {
+ label: "clippy::unused_peekable",
+ description: r##"Checks for the creation of a `peekable` iterator that is never `.peek()`ed"##,
+ },
+ Lint {
+ label: "clippy::unused_rounding",
+ description: r##"Detects cases where a whole-number literal float is being rounded, using
+the `floor`, `ceil`, or `round` methods."##,
+ },
+ Lint {
label: "clippy::unused_self",
description: r##"Checks methods that contain a `self` argument but don't use it"##,
},
@@ -6876,13 +13022,17 @@ by nibble or byte."##,
description: r##"Checks for functions of type `Result` that contain `expect()` or `unwrap()`"##,
},
Lint {
- label: "clippy::unwrap_or_else_default",
- description: r##"Checks for usages of `_.unwrap_or_else(Default::default)` on `Option` and
-`Result` values."##,
+ label: "clippy::unwrap_or_default",
+ description: r##"Checks for usages of the following functions with an argument that constructs a default value
+(e.g., `Default::default` or `String::new`):
+- `unwrap_or`
+- `unwrap_or_else`
+- `or_insert`
+- `or_insert_with`"##,
},
Lint {
label: "clippy::unwrap_used",
- description: r##"Checks for `.unwrap()` calls on `Option`s and on `Result`s."##,
+ description: r##"Checks for `.unwrap()` or `.unwrap_err()` calls on `Result`s and `.unwrap()` call on `Option`s."##,
},
Lint {
label: "clippy::upper_case_acronyms",
@@ -6890,7 +13040,7 @@ by nibble or byte."##,
},
Lint {
label: "clippy::use_debug",
- description: r##"Checks for use of `Debug` formatting. The purpose of this
+ description: r##"Checks for usage of `Debug` formatting. The purpose of this
lint is to catch debugging remnants."##,
},
Lint {
@@ -6913,10 +13063,17 @@ types before and after the call are the same."##,
description: r##"Checks for `extern crate` and `use` items annotated with
lint attributes.
-This lint permits `#[allow(unused_imports)]`, `#[allow(deprecated)]`,
-`#[allow(unreachable_pub)]`, `#[allow(clippy::wildcard_imports)]` and
-`#[allow(clippy::enum_glob_use)]` on `use` items and `#[allow(unused_imports)]` on
-`extern crate` items with a `#[macro_use]` attribute."##,
+This lint permits lint attributes for lints emitted on the items themself.
+For `use` items these lints are:
+* deprecated
+* unreachable_pub
+* unused_imports
+* clippy::enum_glob_use
+* clippy::macro_use_imports
+* clippy::wildcard_imports
+
+For `extern crate` items these lints are:
+* `unused_imports` on items with `#[macro_use]`"##,
},
Lint {
label: "clippy::useless_conversion",
@@ -6940,17 +13097,24 @@ and transmutes that could be a cast."##,
},
Lint {
label: "clippy::useless_vec",
- description: r##"Checks for usage of `&vec![..]` when using `&[..]` would
+ description: r##"Checks for usage of `vec![..]` when using `[..]` would
be possible."##,
},
Lint {
label: "clippy::vec_box",
- description: r##"Checks for use of `Vec<Box<T>>` where T: Sized anywhere in the code.
+ description: r##"Checks for usage of `Vec<Box<T>>` where T: Sized anywhere in the code.
Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information."##,
},
Lint {
label: "clippy::vec_init_then_push",
- description: r##"Checks for calls to `push` immediately after creating a new `Vec`."##,
+ description: r##"Checks for calls to `push` immediately after creating a new `Vec`.
+
+If the `Vec` is created using `with_capacity` this will only lint if the capacity is a
+constant and the number of pushes is greater than or equal to the initial capacity.
+
+If the `Vec` is extended after the initial sequence of pushes and it was default initialized
+then this will only lint after there were at least four pushes. This number may change in
+the future."##,
},
Lint {
label: "clippy::vec_resize_to_zero",
@@ -6963,7 +13127,7 @@ to `trailing_zeros`"##,
},
Lint {
label: "clippy::verbose_file_reads",
- description: r##"Checks for use of File::read_to_end and File::read_to_string."##,
+ description: r##"Checks for usage of File::read_to_end and File::read_to_string."##,
},
Lint {
label: "clippy::vtable_address_comparisons",
@@ -7020,18 +13184,19 @@ print a newline."##,
},
Lint {
label: "clippy::wrong_self_convention",
- description: r##"Checks for methods with certain name prefixes and which
-doesn't match how self is taken. The actual rules are:
-
-|Prefix |Postfix |`self` taken | `self` type |
-|-------|------------|-----------------------|--------------|
-|`as_` | none |`&self` or `&mut self` | any |
-|`from_`| none | none | any |
-|`into_`| none |`self` | any |
-|`is_` | none |`&self` or none | any |
-|`to_` | `_mut` |`&mut self` | any |
-|`to_` | not `_mut` |`self` | `Copy` |
-|`to_` | not `_mut` |`&self` | not `Copy` |
+ description: r##"Checks for methods with certain name prefixes or suffixes, and which
+do not adhere to standard conventions regarding how `self` is taken.
+The actual rules are:
+
+|Prefix |Postfix |`self` taken | `self` type |
+|-------|------------|-------------------------------|--------------|
+|`as_` | none |`&self` or `&mut self` | any |
+|`from_`| none | none | any |
+|`into_`| none |`self` | any |
+|`is_` | none |`&mut self` or `&self` or none | any |
+|`to_` | `_mut` |`&mut self` | any |
+|`to_` | not `_mut` |`self` | `Copy` |
+|`to_` | not `_mut` |`&self` | not `Copy` |
Note: Clippy doesn't trigger methods with `to_` prefix in:
- Traits definition.
@@ -7086,15 +13251,18 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::complexity",
- description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrowed_box, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::manual_filter_map, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_borrowed_reference, clippy::needless_lifetimes, clippy::needless_option_as_deref, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_closure_call, clippy::redundant_slicing, clippy::ref_in_deref, clippy::repeat_once, clippy::result_map_unit_fn, clippy::search_is_some, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, 
clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##,
+ description: r##"lint group for: clippy::bind_instead_of_map, clippy::bool_comparison, clippy::borrow_deref_ref, clippy::borrowed_box, clippy::bytes_count_to_len, clippy::char_lit_as_u8, clippy::clone_on_copy, clippy::crosspointer_transmute, clippy::default_constructed_unit_structs, clippy::deprecated_cfg_attr, clippy::deref_addrof, clippy::derivable_impls, clippy::diverging_sub_expression, clippy::double_comparisons, clippy::double_parens, clippy::duration_subsec, clippy::excessive_nesting, clippy::explicit_auto_deref, clippy::explicit_counter_loop, clippy::explicit_write, clippy::extra_unused_lifetimes, clippy::extra_unused_type_parameters, clippy::filter_map_identity, clippy::filter_next, clippy::flat_map_identity, clippy::get_last_with_len, clippy::identity_op, clippy::inspect_for_each, clippy::int_plus_one, clippy::iter_count, clippy::iter_kv_map, clippy::let_with_type_underscore, clippy::manual_filter, clippy::manual_filter_map, clippy::manual_find, clippy::manual_find_map, clippy::manual_flatten, clippy::manual_hash_one, clippy::manual_main_separator_str, clippy::manual_range_patterns, clippy::manual_rem_euclid, clippy::manual_slice_size_calculation, clippy::manual_split_once, clippy::manual_strip, clippy::manual_swap, clippy::manual_unwrap_or, clippy::map_flatten, clippy::map_identity, clippy::match_as_ref, clippy::match_single_binding, clippy::needless_arbitrary_self_type, clippy::needless_bool, clippy::needless_bool_assign, clippy::needless_borrowed_reference, clippy::needless_if, clippy::needless_lifetimes, clippy::needless_match, clippy::needless_option_as_deref, clippy::needless_option_take, clippy::needless_question_mark, clippy::needless_splitn, clippy::needless_update, clippy::neg_cmp_op_on_partial_ord, clippy::no_effect, clippy::nonminimal_bool, clippy::only_used_in_recursion, clippy::option_as_ref_deref, clippy::option_filter_map, clippy::option_map_unit_fn, clippy::or_then_unwrap, clippy::overflow_check_conditional, clippy::partialeq_ne_impl, 
clippy::precedence, clippy::ptr_offset_with_cast, clippy::range_zip_with_len, clippy::redundant_as_str, clippy::redundant_async_block, clippy::redundant_at_rest_pattern, clippy::redundant_closure_call, clippy::redundant_guards, clippy::redundant_slicing, clippy::repeat_once, clippy::reserve_after_initialization, clippy::result_map_unit_fn, clippy::search_is_some, clippy::seek_from_current, clippy::seek_to_start_instead_of_rewind, clippy::short_circuit_statement, clippy::single_element_loop, clippy::skip_while_next, clippy::string_from_utf8_as_bytes, clippy::strlen_on_c_strings, clippy::temporary_assignment, clippy::too_many_arguments, clippy::transmute_bytes_to_str, clippy::transmute_float_to_int, clippy::transmute_int_to_bool, clippy::transmute_int_to_char, clippy::transmute_int_to_float, clippy::transmute_int_to_non_zero, clippy::transmute_num_to_bytes, clippy::transmute_ptr_to_ref, clippy::transmutes_expressible_as_ptr_casts, clippy::type_complexity, clippy::unit_arg, clippy::unnecessary_cast, clippy::unnecessary_filter_map, clippy::unnecessary_find_map, clippy::unnecessary_literal_unwrap, clippy::unnecessary_map_on_constructor, clippy::unnecessary_operation, clippy::unnecessary_sort_by, clippy::unnecessary_unwrap, clippy::unneeded_wildcard_pattern, clippy::unused_format_specs, clippy::useless_asref, clippy::useless_conversion, clippy::useless_format, clippy::useless_transmute, clippy::vec_box, clippy::while_let_loop, clippy::wildcard_in_or_patterns, clippy::zero_divided_by_zero, clippy::zero_prefixed_literal"##,
},
children: &[
"clippy::bind_instead_of_map",
"clippy::bool_comparison",
+ "clippy::borrow_deref_ref",
"clippy::borrowed_box",
+ "clippy::bytes_count_to_len",
"clippy::char_lit_as_u8",
"clippy::clone_on_copy",
"clippy::crosspointer_transmute",
+ "clippy::default_constructed_unit_structs",
"clippy::deprecated_cfg_attr",
"clippy::deref_addrof",
"clippy::derivable_impls",
@@ -7102,9 +13270,12 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::double_comparisons",
"clippy::double_parens",
"clippy::duration_subsec",
+ "clippy::excessive_nesting",
+ "clippy::explicit_auto_deref",
"clippy::explicit_counter_loop",
"clippy::explicit_write",
"clippy::extra_unused_lifetimes",
+ "clippy::extra_unused_type_parameters",
"clippy::filter_map_identity",
"clippy::filter_next",
"clippy::flat_map_identity",
@@ -7113,9 +13284,18 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::inspect_for_each",
"clippy::int_plus_one",
"clippy::iter_count",
+ "clippy::iter_kv_map",
+ "clippy::let_with_type_underscore",
+ "clippy::manual_filter",
"clippy::manual_filter_map",
+ "clippy::manual_find",
"clippy::manual_find_map",
"clippy::manual_flatten",
+ "clippy::manual_hash_one",
+ "clippy::manual_main_separator_str",
+ "clippy::manual_range_patterns",
+ "clippy::manual_rem_euclid",
+ "clippy::manual_slice_size_calculation",
"clippy::manual_split_once",
"clippy::manual_strip",
"clippy::manual_swap",
@@ -7126,29 +13306,41 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::match_single_binding",
"clippy::needless_arbitrary_self_type",
"clippy::needless_bool",
+ "clippy::needless_bool_assign",
"clippy::needless_borrowed_reference",
+ "clippy::needless_if",
"clippy::needless_lifetimes",
+ "clippy::needless_match",
"clippy::needless_option_as_deref",
+ "clippy::needless_option_take",
"clippy::needless_question_mark",
"clippy::needless_splitn",
"clippy::needless_update",
"clippy::neg_cmp_op_on_partial_ord",
"clippy::no_effect",
"clippy::nonminimal_bool",
+ "clippy::only_used_in_recursion",
"clippy::option_as_ref_deref",
"clippy::option_filter_map",
"clippy::option_map_unit_fn",
+ "clippy::or_then_unwrap",
"clippy::overflow_check_conditional",
"clippy::partialeq_ne_impl",
"clippy::precedence",
"clippy::ptr_offset_with_cast",
"clippy::range_zip_with_len",
+ "clippy::redundant_as_str",
+ "clippy::redundant_async_block",
+ "clippy::redundant_at_rest_pattern",
"clippy::redundant_closure_call",
+ "clippy::redundant_guards",
"clippy::redundant_slicing",
- "clippy::ref_in_deref",
"clippy::repeat_once",
+ "clippy::reserve_after_initialization",
"clippy::result_map_unit_fn",
"clippy::search_is_some",
+ "clippy::seek_from_current",
+ "clippy::seek_to_start_instead_of_rewind",
"clippy::short_circuit_statement",
"clippy::single_element_loop",
"clippy::skip_while_next",
@@ -7161,6 +13353,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::transmute_int_to_bool",
"clippy::transmute_int_to_char",
"clippy::transmute_int_to_float",
+ "clippy::transmute_int_to_non_zero",
"clippy::transmute_num_to_bytes",
"clippy::transmute_ptr_to_ref",
"clippy::transmutes_expressible_as_ptr_casts",
@@ -7168,13 +13361,18 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::unit_arg",
"clippy::unnecessary_cast",
"clippy::unnecessary_filter_map",
+ "clippy::unnecessary_find_map",
+ "clippy::unnecessary_literal_unwrap",
+ "clippy::unnecessary_map_on_constructor",
"clippy::unnecessary_operation",
"clippy::unnecessary_sort_by",
"clippy::unnecessary_unwrap",
"clippy::unneeded_wildcard_pattern",
+ "clippy::unused_format_specs",
"clippy::useless_asref",
"clippy::useless_conversion",
"clippy::useless_format",
+ "clippy::useless_transmute",
"clippy::vec_box",
"clippy::while_let_loop",
"clippy::wildcard_in_or_patterns",
@@ -7185,7 +13383,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::correctness",
- description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_ref_to_mut, clippy::clone_double_ref, clippy::cmp_nan, clippy::deprecated_semver, clippy::derive_hash_xor_eq, clippy::derive_ord_xor_partial_ord, clippy::drop_copy, clippy::drop_ref, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::forget_copy, clippy::forget_ref, clippy::if_let_mutex, clippy::if_same_then_else, clippy::ifs_same_cond, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::logic_bug, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::to_string_in_display, clippy::transmuting_null, clippy::undropped_manually_drops, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::vtable_address_comparisons, clippy::while_immutable_condition, clippy::wrong_transmute, clippy::zst_offset"##,
+ description: r##"lint group for: clippy::absurd_extreme_comparisons, clippy::almost_swapped, clippy::approx_constant, clippy::async_yields_async, clippy::bad_bit_mask, clippy::cast_slice_different_sizes, clippy::deprecated_semver, clippy::derive_ord_xor_partial_ord, clippy::derived_hash_with_manual_eq, clippy::enum_clike_unportable_variant, clippy::eq_op, clippy::erasing_op, clippy::fn_address_comparisons, clippy::if_let_mutex, clippy::if_same_then_else, clippy::ifs_same_cond, clippy::impossible_comparisons, clippy::ineffective_bit_mask, clippy::infinite_iter, clippy::inherent_to_string_shadow_display, clippy::inline_fn_without_body, clippy::invalid_null_ptr_usage, clippy::invalid_regex, clippy::invisible_characters, clippy::iter_next_loop, clippy::iter_skip_zero, clippy::iterator_step_by_zero, clippy::let_underscore_lock, clippy::match_str_case_mismatch, clippy::mem_replace_with_uninit, clippy::min_max, clippy::mismatched_target_os, clippy::mistyped_literal_suffixes, clippy::modulo_one, clippy::mut_from_ref, clippy::never_loop, clippy::non_octal_unix_permissions, clippy::nonsensical_open_options, clippy::not_unsafe_ptr_arg_deref, clippy::option_env_unwrap, clippy::out_of_bounds_indexing, clippy::overly_complex_bool_expr, clippy::panicking_unwrap, clippy::possible_missing_comma, clippy::read_line_without_trim, clippy::read_zero_byte_vec, clippy::recursive_format_impl, clippy::redundant_comparisons, clippy::redundant_locals, clippy::reversed_empty_ranges, clippy::self_assignment, clippy::serde_api_misuse, clippy::size_of_in_element_count, clippy::suspicious_splitn, clippy::transmute_null_to_fn, clippy::transmuting_null, clippy::uninit_assumed_init, clippy::uninit_vec, clippy::unit_cmp, clippy::unit_hash, clippy::unit_return_expecting_ord, clippy::unsound_collection_transmute, clippy::unused_io_amount, clippy::useless_attribute, clippy::vec_resize_to_zero, clippy::vtable_address_comparisons, clippy::while_immutable_condition, clippy::wrong_transmute, 
clippy::zst_offset"##,
},
children: &[
"clippy::absurd_extreme_comparisons",
@@ -7193,23 +13391,18 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::approx_constant",
"clippy::async_yields_async",
"clippy::bad_bit_mask",
- "clippy::cast_ref_to_mut",
- "clippy::clone_double_ref",
- "clippy::cmp_nan",
+ "clippy::cast_slice_different_sizes",
"clippy::deprecated_semver",
- "clippy::derive_hash_xor_eq",
"clippy::derive_ord_xor_partial_ord",
- "clippy::drop_copy",
- "clippy::drop_ref",
+ "clippy::derived_hash_with_manual_eq",
"clippy::enum_clike_unportable_variant",
"clippy::eq_op",
"clippy::erasing_op",
"clippy::fn_address_comparisons",
- "clippy::forget_copy",
- "clippy::forget_ref",
"clippy::if_let_mutex",
"clippy::if_same_then_else",
"clippy::ifs_same_cond",
+ "clippy::impossible_comparisons",
"clippy::ineffective_bit_mask",
"clippy::infinite_iter",
"clippy::inherent_to_string_shadow_display",
@@ -7218,9 +13411,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::invalid_regex",
"clippy::invisible_characters",
"clippy::iter_next_loop",
+ "clippy::iter_skip_zero",
"clippy::iterator_step_by_zero",
"clippy::let_underscore_lock",
- "clippy::logic_bug",
"clippy::match_str_case_mismatch",
"clippy::mem_replace_with_uninit",
"clippy::min_max",
@@ -7234,16 +13427,21 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::not_unsafe_ptr_arg_deref",
"clippy::option_env_unwrap",
"clippy::out_of_bounds_indexing",
+ "clippy::overly_complex_bool_expr",
"clippy::panicking_unwrap",
"clippy::possible_missing_comma",
+ "clippy::read_line_without_trim",
+ "clippy::read_zero_byte_vec",
+ "clippy::recursive_format_impl",
+ "clippy::redundant_comparisons",
+ "clippy::redundant_locals",
"clippy::reversed_empty_ranges",
"clippy::self_assignment",
"clippy::serde_api_misuse",
"clippy::size_of_in_element_count",
"clippy::suspicious_splitn",
- "clippy::to_string_in_display",
+ "clippy::transmute_null_to_fn",
"clippy::transmuting_null",
- "clippy::undropped_manually_drops",
"clippy::uninit_assumed_init",
"clippy::uninit_vec",
"clippy::unit_cmp",
@@ -7286,45 +13484,66 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::nursery",
- description: r##"lint group for: clippy::branches_sharing_code, clippy::cognitive_complexity, clippy::debug_assert_with_mut_call, clippy::disallowed_methods, clippy::disallowed_types, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::imprecise_flops, clippy::index_refutable_slice, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::path_buf_push_overwrite, clippy::redundant_pub_crate, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trivial_regex, clippy::use_self, clippy::useless_let_if_seq, clippy::useless_transmute"##,
+ description: r##"lint group for: clippy::as_ptr_cast_mut, clippy::branches_sharing_code, clippy::clear_with_drain, clippy::cognitive_complexity, clippy::collection_is_never_read, clippy::debug_assert_with_mut_call, clippy::derive_partial_eq_without_eq, clippy::empty_line_after_doc_comments, clippy::empty_line_after_outer_attr, clippy::equatable_if_let, clippy::fallible_impl_from, clippy::future_not_send, clippy::implied_bounds_in_impls, clippy::imprecise_flops, clippy::iter_on_empty_collections, clippy::iter_on_single_items, clippy::iter_with_drain, clippy::large_stack_frames, clippy::manual_clamp, clippy::missing_const_for_fn, clippy::mutex_integer, clippy::needless_collect, clippy::needless_pass_by_ref_mut, clippy::non_send_fields_in_send_ty, clippy::nonstandard_macro_braces, clippy::option_if_let_else, clippy::or_fun_call, clippy::path_buf_push_overwrite, clippy::readonly_write_lock, clippy::redundant_clone, clippy::redundant_pub_crate, clippy::significant_drop_in_scrutinee, clippy::significant_drop_tightening, clippy::string_lit_as_bytes, clippy::suboptimal_flops, clippy::suspicious_operation_groupings, clippy::trailing_empty_array, clippy::trait_duplication_in_bounds, clippy::transmute_undefined_repr, clippy::trivial_regex, clippy::tuple_array_conversions, clippy::type_repetition_in_bounds, clippy::unnecessary_struct_initialization, clippy::unused_peekable, clippy::unused_rounding, clippy::use_self, clippy::useless_let_if_seq"##,
},
children: &[
+ "clippy::as_ptr_cast_mut",
"clippy::branches_sharing_code",
+ "clippy::clear_with_drain",
"clippy::cognitive_complexity",
+ "clippy::collection_is_never_read",
"clippy::debug_assert_with_mut_call",
- "clippy::disallowed_methods",
- "clippy::disallowed_types",
+ "clippy::derive_partial_eq_without_eq",
+ "clippy::empty_line_after_doc_comments",
"clippy::empty_line_after_outer_attr",
"clippy::equatable_if_let",
"clippy::fallible_impl_from",
"clippy::future_not_send",
+ "clippy::implied_bounds_in_impls",
"clippy::imprecise_flops",
- "clippy::index_refutable_slice",
+ "clippy::iter_on_empty_collections",
+ "clippy::iter_on_single_items",
+ "clippy::iter_with_drain",
+ "clippy::large_stack_frames",
+ "clippy::manual_clamp",
"clippy::missing_const_for_fn",
"clippy::mutex_integer",
+ "clippy::needless_collect",
+ "clippy::needless_pass_by_ref_mut",
"clippy::non_send_fields_in_send_ty",
"clippy::nonstandard_macro_braces",
"clippy::option_if_let_else",
+ "clippy::or_fun_call",
"clippy::path_buf_push_overwrite",
+ "clippy::readonly_write_lock",
+ "clippy::redundant_clone",
"clippy::redundant_pub_crate",
+ "clippy::significant_drop_in_scrutinee",
+ "clippy::significant_drop_tightening",
"clippy::string_lit_as_bytes",
"clippy::suboptimal_flops",
"clippy::suspicious_operation_groupings",
"clippy::trailing_empty_array",
+ "clippy::trait_duplication_in_bounds",
+ "clippy::transmute_undefined_repr",
"clippy::trivial_regex",
+ "clippy::tuple_array_conversions",
+ "clippy::type_repetition_in_bounds",
+ "clippy::unnecessary_struct_initialization",
+ "clippy::unused_peekable",
+ "clippy::unused_rounding",
"clippy::use_self",
"clippy::useless_let_if_seq",
- "clippy::useless_transmute",
],
},
LintGroup {
lint: Lint {
label: "clippy::pedantic",
- description: r##"lint group for: clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::implicit_clone, clippy::implicit_hasher, clippy::implicit_saturating_sub, clippy::inconsistent_struct_constructor, clippy::inefficient_to_string, clippy::inline_always, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_not_returning_iterator, clippy::large_digit_groups, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::let_underscore_drop, clippy::let_unit_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_ok_or, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::missing_errors_doc, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, clippy::needless_for_each, clippy::needless_pass_by_value, clippy::no_effect_underscore_binding, clippy::option_option, clippy::ptr_as_ptr, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, 
clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::similar_names, clippy::single_match_else, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::too_many_lines, clippy::trait_duplication_in_bounds, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::type_repetition_in_bounds, clippy::unicode_not_nfc, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##,
+ description: r##"lint group for: clippy::bool_to_int_with_if, clippy::borrow_as_ptr, clippy::case_sensitive_file_extension_comparisons, clippy::cast_lossless, clippy::cast_possible_truncation, clippy::cast_possible_wrap, clippy::cast_precision_loss, clippy::cast_ptr_alignment, clippy::cast_sign_loss, clippy::checked_conversions, clippy::cloned_instead_of_copied, clippy::copy_iterator, clippy::default_trait_access, clippy::doc_link_with_quotes, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::expl_impl_clone_on_copy, clippy::explicit_deref_methods, clippy::explicit_into_iter_loop, clippy::explicit_iter_loop, clippy::filter_map_next, clippy::flat_map_option, clippy::float_cmp, clippy::fn_params_excessive_bools, clippy::from_iter_instead_of_collect, clippy::if_not_else, clippy::ignored_unit_patterns, clippy::implicit_clone, clippy::implicit_hasher, clippy::inconsistent_struct_constructor, clippy::index_refutable_slice, clippy::inefficient_to_string, clippy::inline_always, clippy::into_iter_without_iter, clippy::invalid_upcast_comparisons, clippy::items_after_statements, clippy::iter_not_returning_iterator, clippy::iter_without_into_iter, clippy::large_digit_groups, clippy::large_futures, clippy::large_stack_arrays, clippy::large_types_passed_by_value, clippy::linkedlist, clippy::macro_use_imports, clippy::manual_assert, clippy::manual_instant_elapsed, clippy::manual_let_else, clippy::manual_ok_or, clippy::manual_string_new, clippy::many_single_char_names, clippy::map_unwrap_or, clippy::match_bool, clippy::match_on_vec_items, clippy::match_same_arms, clippy::match_wild_err_arm, clippy::match_wildcard_for_single_variants, clippy::maybe_infinite_iter, clippy::mismatching_type_param_order, clippy::missing_errors_doc, clippy::missing_fields_in_debug, clippy::missing_panics_doc, clippy::module_name_repetitions, clippy::must_use_candidate, clippy::mut_mut, clippy::naive_bytecount, clippy::needless_bitwise_bool, clippy::needless_continue, 
clippy::needless_for_each, clippy::needless_pass_by_value, clippy::needless_raw_string_hashes, clippy::no_effect_underscore_binding, clippy::no_mangle_with_rust_abi, clippy::option_option, clippy::ptr_as_ptr, clippy::ptr_cast_constness, clippy::range_minus_one, clippy::range_plus_one, clippy::redundant_closure_for_method_calls, clippy::redundant_else, clippy::ref_binding_to_reference, clippy::ref_option_ref, clippy::return_self_not_must_use, clippy::same_functions_in_if_condition, clippy::semicolon_if_nothing_returned, clippy::should_panic_without_expect, clippy::similar_names, clippy::single_match_else, clippy::stable_sort_primitive, clippy::string_add_assign, clippy::struct_excessive_bools, clippy::too_many_lines, clippy::transmute_ptr_to_ptr, clippy::trivially_copy_pass_by_ref, clippy::unchecked_duration_subtraction, clippy::unicode_not_nfc, clippy::uninlined_format_args, clippy::unnecessary_box_returns, clippy::unnecessary_join, clippy::unnecessary_wraps, clippy::unnested_or_patterns, clippy::unreadable_literal, clippy::unsafe_derive_deserialize, clippy::unused_async, clippy::unused_self, clippy::used_underscore_binding, clippy::verbose_bit_mask, clippy::wildcard_imports, clippy::zero_sized_map_values"##,
},
children: &[
- "clippy::await_holding_lock",
- "clippy::await_holding_refcell_ref",
+ "clippy::bool_to_int_with_if",
+ "clippy::borrow_as_ptr",
"clippy::case_sensitive_file_extension_comparisons",
"clippy::cast_lossless",
"clippy::cast_possible_truncation",
@@ -7336,6 +13555,7 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::cloned_instead_of_copied",
"clippy::copy_iterator",
"clippy::default_trait_access",
+ "clippy::doc_link_with_quotes",
"clippy::doc_markdown",
"clippy::empty_enum",
"clippy::enum_glob_use",
@@ -7349,24 +13569,29 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::fn_params_excessive_bools",
"clippy::from_iter_instead_of_collect",
"clippy::if_not_else",
+ "clippy::ignored_unit_patterns",
"clippy::implicit_clone",
"clippy::implicit_hasher",
- "clippy::implicit_saturating_sub",
"clippy::inconsistent_struct_constructor",
+ "clippy::index_refutable_slice",
"clippy::inefficient_to_string",
"clippy::inline_always",
+ "clippy::into_iter_without_iter",
"clippy::invalid_upcast_comparisons",
"clippy::items_after_statements",
"clippy::iter_not_returning_iterator",
+ "clippy::iter_without_into_iter",
"clippy::large_digit_groups",
+ "clippy::large_futures",
"clippy::large_stack_arrays",
"clippy::large_types_passed_by_value",
- "clippy::let_underscore_drop",
- "clippy::let_unit_value",
"clippy::linkedlist",
"clippy::macro_use_imports",
"clippy::manual_assert",
+ "clippy::manual_instant_elapsed",
+ "clippy::manual_let_else",
"clippy::manual_ok_or",
+ "clippy::manual_string_new",
"clippy::many_single_char_names",
"clippy::map_unwrap_or",
"clippy::match_bool",
@@ -7375,7 +13600,9 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::match_wild_err_arm",
"clippy::match_wildcard_for_single_variants",
"clippy::maybe_infinite_iter",
+ "clippy::mismatching_type_param_order",
"clippy::missing_errors_doc",
+ "clippy::missing_fields_in_debug",
"clippy::missing_panics_doc",
"clippy::module_name_repetitions",
"clippy::must_use_candidate",
@@ -7385,27 +13612,35 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::needless_continue",
"clippy::needless_for_each",
"clippy::needless_pass_by_value",
+ "clippy::needless_raw_string_hashes",
"clippy::no_effect_underscore_binding",
+ "clippy::no_mangle_with_rust_abi",
"clippy::option_option",
"clippy::ptr_as_ptr",
+ "clippy::ptr_cast_constness",
"clippy::range_minus_one",
"clippy::range_plus_one",
"clippy::redundant_closure_for_method_calls",
"clippy::redundant_else",
"clippy::ref_binding_to_reference",
"clippy::ref_option_ref",
+ "clippy::return_self_not_must_use",
"clippy::same_functions_in_if_condition",
"clippy::semicolon_if_nothing_returned",
+ "clippy::should_panic_without_expect",
"clippy::similar_names",
"clippy::single_match_else",
+ "clippy::stable_sort_primitive",
"clippy::string_add_assign",
"clippy::struct_excessive_bools",
"clippy::too_many_lines",
- "clippy::trait_duplication_in_bounds",
"clippy::transmute_ptr_to_ptr",
"clippy::trivially_copy_pass_by_ref",
- "clippy::type_repetition_in_bounds",
+ "clippy::unchecked_duration_subtraction",
"clippy::unicode_not_nfc",
+ "clippy::uninlined_format_args",
+ "clippy::unnecessary_box_returns",
+ "clippy::unnecessary_join",
"clippy::unnecessary_wraps",
"clippy::unnested_or_patterns",
"clippy::unreadable_literal",
@@ -7421,29 +13656,33 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::perf",
- description: r##"lint group for: clippy::box_collection, clippy::boxed_local, clippy::cmp_owned, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_in_format_args, clippy::iter_nth, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_str_repeat, clippy::map_entry, clippy::mutex_atomic, clippy::needless_collect, clippy::or_fun_call, clippy::redundant_allocation, clippy::redundant_clone, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::stable_sort_primitive, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push"##,
+ description: r##"lint group for: clippy::box_collection, clippy::box_default, clippy::boxed_local, clippy::cmp_owned, clippy::collapsible_str_replace, clippy::drain_collect, clippy::expect_fun_call, clippy::extend_with_drain, clippy::format_collect, clippy::format_in_format_args, clippy::iter_nth, clippy::iter_overeager_cloned, clippy::large_const_arrays, clippy::large_enum_variant, clippy::manual_memcpy, clippy::manual_retain, clippy::manual_str_repeat, clippy::manual_try_fold, clippy::map_entry, clippy::missing_spin_loop, clippy::redundant_allocation, clippy::result_large_err, clippy::single_char_pattern, clippy::slow_vector_initialization, clippy::to_string_in_format_args, clippy::unnecessary_to_owned, clippy::useless_vec, clippy::vec_init_then_push"##,
},
children: &[
"clippy::box_collection",
+ "clippy::box_default",
"clippy::boxed_local",
"clippy::cmp_owned",
+ "clippy::collapsible_str_replace",
+ "clippy::drain_collect",
"clippy::expect_fun_call",
"clippy::extend_with_drain",
+ "clippy::format_collect",
"clippy::format_in_format_args",
"clippy::iter_nth",
+ "clippy::iter_overeager_cloned",
"clippy::large_const_arrays",
"clippy::large_enum_variant",
"clippy::manual_memcpy",
+ "clippy::manual_retain",
"clippy::manual_str_repeat",
+ "clippy::manual_try_fold",
"clippy::map_entry",
- "clippy::mutex_atomic",
- "clippy::needless_collect",
- "clippy::or_fun_call",
+ "clippy::missing_spin_loop",
"clippy::redundant_allocation",
- "clippy::redundant_clone",
+ "clippy::result_large_err",
"clippy::single_char_pattern",
"clippy::slow_vector_initialization",
- "clippy::stable_sort_primitive",
"clippy::to_string_in_format_args",
"clippy::unnecessary_to_owned",
"clippy::useless_vec",
@@ -7453,17 +13692,30 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::restriction",
- description: r##"lint group for: clippy::as_conversions, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::get_unwrap, clippy::if_then_some_else_none, clippy::implicit_return, clippy::indexing_slicing, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_arithmetic, clippy::integer_division, clippy::let_underscore_must_use, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::missing_docs_in_private_items, clippy::missing_enforced_import_renames, clippy::missing_inline_in_public_items, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::rc_buffer, clippy::rc_mutex, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::str_to_string, clippy::string_add, clippy::string_slice, clippy::string_to_string, clippy::todo, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##,
+ description: r##"lint group for: clippy::absolute_paths, clippy::alloc_instead_of_core, clippy::allow_attributes, clippy::allow_attributes_without_reason, clippy::arithmetic_side_effects, clippy::as_conversions, clippy::as_underscore, clippy::assertions_on_result_states, clippy::big_endian_bytes, clippy::clone_on_ref_ptr, clippy::create_dir, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::default_numeric_fallback, clippy::default_union_representation, clippy::deref_by_slicing, clippy::disallowed_script_idents, clippy::else_if_without_else, clippy::empty_drop, clippy::empty_structs_with_brackets, clippy::error_impl_error, clippy::exhaustive_enums, clippy::exhaustive_structs, clippy::exit, clippy::expect_used, clippy::filetype_is_file, clippy::float_arithmetic, clippy::float_cmp_const, clippy::fn_to_numeric_cast_any, clippy::format_push_string, clippy::get_unwrap, clippy::host_endian_bytes, clippy::if_then_some_else_none, clippy::impl_trait_in_params, clippy::implicit_return, clippy::indexing_slicing, clippy::inline_asm_x86_att_syntax, clippy::inline_asm_x86_intel_syntax, clippy::integer_division, clippy::large_include_file, clippy::let_underscore_must_use, clippy::let_underscore_untyped, clippy::little_endian_bytes, clippy::lossy_float_literal, clippy::map_err_ignore, clippy::mem_forget, clippy::min_ident_chars, clippy::missing_assert_message, clippy::missing_asserts_for_indexing, clippy::missing_docs_in_private_items, clippy::missing_enforced_import_renames, clippy::missing_inline_in_public_items, clippy::missing_trait_methods, clippy::mixed_read_write_in_expression, clippy::mod_module_files, clippy::modulo_arithmetic, clippy::multiple_inherent_impl, clippy::multiple_unsafe_ops_per_block, clippy::mutex_atomic, clippy::needless_raw_strings, clippy::non_ascii_literal, clippy::panic, clippy::panic_in_result_fn, clippy::partial_pub_fields, clippy::pattern_type_mismatch, clippy::print_stderr, clippy::print_stdout, clippy::pub_use, 
clippy::pub_with_shorthand, clippy::pub_without_shorthand, clippy::question_mark_used, clippy::rc_buffer, clippy::rc_mutex, clippy::redundant_type_annotations, clippy::ref_patterns, clippy::rest_pat_in_fully_bound_structs, clippy::same_name_method, clippy::self_named_module_files, clippy::semicolon_inside_block, clippy::semicolon_outside_block, clippy::separated_literal_suffix, clippy::shadow_reuse, clippy::shadow_same, clippy::shadow_unrelated, clippy::single_call_fn, clippy::single_char_lifetime_names, clippy::std_instead_of_alloc, clippy::std_instead_of_core, clippy::str_to_string, clippy::string_add, clippy::string_lit_chars_any, clippy::string_slice, clippy::string_to_string, clippy::suspicious_xor_used_as_pow, clippy::tests_outside_test_module, clippy::todo, clippy::try_err, clippy::undocumented_unsafe_blocks, clippy::unimplemented, clippy::unnecessary_safety_comment, clippy::unnecessary_safety_doc, clippy::unnecessary_self_imports, clippy::unneeded_field_pattern, clippy::unreachable, clippy::unseparated_literal_suffix, clippy::unwrap_in_result, clippy::unwrap_used, clippy::use_debug, clippy::verbose_file_reads, clippy::wildcard_enum_match_arm"##,
},
children: &[
+ "clippy::absolute_paths",
+ "clippy::alloc_instead_of_core",
+ "clippy::allow_attributes",
+ "clippy::allow_attributes_without_reason",
+ "clippy::arithmetic_side_effects",
"clippy::as_conversions",
+ "clippy::as_underscore",
+ "clippy::assertions_on_result_states",
+ "clippy::big_endian_bytes",
"clippy::clone_on_ref_ptr",
"clippy::create_dir",
"clippy::dbg_macro",
"clippy::decimal_literal_representation",
"clippy::default_numeric_fallback",
+ "clippy::default_union_representation",
+ "clippy::deref_by_slicing",
"clippy::disallowed_script_idents",
"clippy::else_if_without_else",
+ "clippy::empty_drop",
+ "clippy::empty_structs_with_brackets",
+ "clippy::error_impl_error",
"clippy::exhaustive_enums",
"clippy::exhaustive_structs",
"clippy::exit",
@@ -7472,46 +13724,78 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::float_arithmetic",
"clippy::float_cmp_const",
"clippy::fn_to_numeric_cast_any",
+ "clippy::format_push_string",
"clippy::get_unwrap",
+ "clippy::host_endian_bytes",
"clippy::if_then_some_else_none",
+ "clippy::impl_trait_in_params",
"clippy::implicit_return",
"clippy::indexing_slicing",
"clippy::inline_asm_x86_att_syntax",
"clippy::inline_asm_x86_intel_syntax",
- "clippy::integer_arithmetic",
"clippy::integer_division",
+ "clippy::large_include_file",
"clippy::let_underscore_must_use",
+ "clippy::let_underscore_untyped",
+ "clippy::little_endian_bytes",
"clippy::lossy_float_literal",
"clippy::map_err_ignore",
"clippy::mem_forget",
+ "clippy::min_ident_chars",
+ "clippy::missing_assert_message",
+ "clippy::missing_asserts_for_indexing",
"clippy::missing_docs_in_private_items",
"clippy::missing_enforced_import_renames",
"clippy::missing_inline_in_public_items",
+ "clippy::missing_trait_methods",
+ "clippy::mixed_read_write_in_expression",
"clippy::mod_module_files",
"clippy::modulo_arithmetic",
"clippy::multiple_inherent_impl",
+ "clippy::multiple_unsafe_ops_per_block",
+ "clippy::mutex_atomic",
+ "clippy::needless_raw_strings",
"clippy::non_ascii_literal",
"clippy::panic",
"clippy::panic_in_result_fn",
+ "clippy::partial_pub_fields",
"clippy::pattern_type_mismatch",
"clippy::print_stderr",
"clippy::print_stdout",
+ "clippy::pub_use",
+ "clippy::pub_with_shorthand",
+ "clippy::pub_without_shorthand",
+ "clippy::question_mark_used",
"clippy::rc_buffer",
"clippy::rc_mutex",
+ "clippy::redundant_type_annotations",
+ "clippy::ref_patterns",
"clippy::rest_pat_in_fully_bound_structs",
"clippy::same_name_method",
"clippy::self_named_module_files",
+ "clippy::semicolon_inside_block",
+ "clippy::semicolon_outside_block",
"clippy::separated_literal_suffix",
"clippy::shadow_reuse",
"clippy::shadow_same",
"clippy::shadow_unrelated",
+ "clippy::single_call_fn",
+ "clippy::single_char_lifetime_names",
+ "clippy::std_instead_of_alloc",
+ "clippy::std_instead_of_core",
"clippy::str_to_string",
"clippy::string_add",
+ "clippy::string_lit_chars_any",
"clippy::string_slice",
"clippy::string_to_string",
+ "clippy::suspicious_xor_used_as_pow",
+ "clippy::tests_outside_test_module",
"clippy::todo",
+ "clippy::try_err",
"clippy::undocumented_unsafe_blocks",
"clippy::unimplemented",
+ "clippy::unnecessary_safety_comment",
+ "clippy::unnecessary_safety_doc",
"clippy::unnecessary_self_imports",
"clippy::unneeded_field_pattern",
"clippy::unreachable",
@@ -7526,12 +13810,11 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::style",
- description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blacklisted_name, clippy::blocks_in_if_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::into_iter_on_ref, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_map, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_doctest_main, clippy::needless_late_init, clippy::needless_range_loop, clippy::needless_return, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_none, 
clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::toplevel_ref_arg, clippy::try_err, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unsafe_removed_from_name, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_else_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##,
+ description: r##"lint group for: clippy::assertions_on_constants, clippy::assign_op_pattern, clippy::blocks_in_if_conditions, clippy::bool_assert_comparison, clippy::borrow_interior_mutable_const, clippy::builtin_type_shadow, clippy::bytes_nth, clippy::chars_last_cmp, clippy::chars_next_cmp, clippy::cmp_null, clippy::collapsible_else_if, clippy::collapsible_if, clippy::collapsible_match, clippy::comparison_chain, clippy::comparison_to_empty, clippy::declare_interior_mutable_const, clippy::default_instead_of_iter_empty, clippy::disallowed_macros, clippy::disallowed_methods, clippy::disallowed_names, clippy::disallowed_types, clippy::double_must_use, clippy::double_neg, clippy::duplicate_underscore_argument, clippy::enum_variant_names, clippy::err_expect, clippy::excessive_precision, clippy::field_reassign_with_default, clippy::filter_map_bool_then, clippy::fn_to_numeric_cast, clippy::fn_to_numeric_cast_with_truncation, clippy::for_kv_map, clippy::from_over_into, clippy::from_str_radix_10, clippy::get_first, clippy::implicit_saturating_add, clippy::implicit_saturating_sub, clippy::inconsistent_digit_grouping, clippy::infallible_destructuring_match, clippy::inherent_to_string, clippy::init_numbered_fields, clippy::into_iter_on_ref, clippy::is_digit_ascii_radix, clippy::items_after_test_module, clippy::iter_cloned_collect, clippy::iter_next_slice, clippy::iter_nth_zero, clippy::iter_skip_next, clippy::just_underscores_and_digits, clippy::len_without_is_empty, clippy::len_zero, clippy::let_and_return, clippy::let_unit_value, clippy::main_recursion, clippy::manual_async_fn, clippy::manual_bits, clippy::manual_is_ascii_check, clippy::manual_is_finite, clippy::manual_is_infinite, clippy::manual_map, clippy::manual_next_back, clippy::manual_non_exhaustive, clippy::manual_range_contains, clippy::manual_saturating_arithmetic, clippy::manual_while_let_some, clippy::map_clone, clippy::map_collect_result_unit, clippy::match_like_matches_macro, clippy::match_overlapping_arm, 
clippy::match_ref_pats, clippy::match_result_ok, clippy::mem_replace_option_with_none, clippy::mem_replace_with_default, clippy::missing_safety_doc, clippy::mixed_case_hex_literals, clippy::module_inception, clippy::must_use_unit, clippy::mut_mutex_lock, clippy::needless_borrow, clippy::needless_borrows_for_generic_args, clippy::needless_doctest_main, clippy::needless_else, clippy::needless_late_init, clippy::needless_parens_on_range_literals, clippy::needless_pub_self, clippy::needless_range_loop, clippy::needless_return, clippy::needless_return_with_question_mark, clippy::neg_multiply, clippy::new_ret_no_self, clippy::new_without_default, clippy::non_minimal_cfg, clippy::obfuscated_if_else, clippy::ok_expect, clippy::op_ref, clippy::option_map_or_none, clippy::partialeq_to_none, clippy::print_literal, clippy::print_with_newline, clippy::println_empty_string, clippy::ptr_arg, clippy::ptr_eq, clippy::question_mark, clippy::redundant_closure, clippy::redundant_field_names, clippy::redundant_pattern, clippy::redundant_pattern_matching, clippy::redundant_static_lifetimes, clippy::result_map_or_into_option, clippy::result_unit_err, clippy::same_item_push, clippy::self_named_constructors, clippy::should_implement_trait, clippy::single_char_add_str, clippy::single_component_path_imports, clippy::single_match, clippy::string_extend_chars, clippy::tabs_in_doc_comments, clippy::to_digit_is_some, clippy::toplevel_ref_arg, clippy::trim_split_whitespace, clippy::unnecessary_fold, clippy::unnecessary_lazy_evaluations, clippy::unnecessary_mut_passed, clippy::unnecessary_owned_empty_strings, clippy::unsafe_removed_from_name, clippy::unused_unit, clippy::unusual_byte_groupings, clippy::unwrap_or_default, clippy::upper_case_acronyms, clippy::while_let_on_iterator, clippy::write_literal, clippy::write_with_newline, clippy::writeln_empty_string, clippy::wrong_self_convention, clippy::zero_ptr"##,
},
children: &[
"clippy::assertions_on_constants",
"clippy::assign_op_pattern",
- "clippy::blacklisted_name",
"clippy::blocks_in_if_conditions",
"clippy::bool_assert_comparison",
"clippy::borrow_interior_mutable_const",
@@ -7546,21 +13829,34 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::comparison_chain",
"clippy::comparison_to_empty",
"clippy::declare_interior_mutable_const",
+ "clippy::default_instead_of_iter_empty",
+ "clippy::disallowed_macros",
+ "clippy::disallowed_methods",
+ "clippy::disallowed_names",
+ "clippy::disallowed_types",
"clippy::double_must_use",
"clippy::double_neg",
"clippy::duplicate_underscore_argument",
"clippy::enum_variant_names",
+ "clippy::err_expect",
"clippy::excessive_precision",
"clippy::field_reassign_with_default",
+ "clippy::filter_map_bool_then",
"clippy::fn_to_numeric_cast",
"clippy::fn_to_numeric_cast_with_truncation",
"clippy::for_kv_map",
"clippy::from_over_into",
"clippy::from_str_radix_10",
+ "clippy::get_first",
+ "clippy::implicit_saturating_add",
+ "clippy::implicit_saturating_sub",
"clippy::inconsistent_digit_grouping",
"clippy::infallible_destructuring_match",
"clippy::inherent_to_string",
+ "clippy::init_numbered_fields",
"clippy::into_iter_on_ref",
+ "clippy::is_digit_ascii_radix",
+ "clippy::items_after_test_module",
"clippy::iter_cloned_collect",
"clippy::iter_next_slice",
"clippy::iter_nth_zero",
@@ -7569,12 +13865,19 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::len_without_is_empty",
"clippy::len_zero",
"clippy::let_and_return",
+ "clippy::let_unit_value",
"clippy::main_recursion",
"clippy::manual_async_fn",
+ "clippy::manual_bits",
+ "clippy::manual_is_ascii_check",
+ "clippy::manual_is_finite",
+ "clippy::manual_is_infinite",
"clippy::manual_map",
+ "clippy::manual_next_back",
"clippy::manual_non_exhaustive",
"clippy::manual_range_contains",
"clippy::manual_saturating_arithmetic",
+ "clippy::manual_while_let_some",
"clippy::map_clone",
"clippy::map_collect_result_unit",
"clippy::match_like_matches_macro",
@@ -7589,16 +13892,24 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::must_use_unit",
"clippy::mut_mutex_lock",
"clippy::needless_borrow",
+ "clippy::needless_borrows_for_generic_args",
"clippy::needless_doctest_main",
+ "clippy::needless_else",
"clippy::needless_late_init",
+ "clippy::needless_parens_on_range_literals",
+ "clippy::needless_pub_self",
"clippy::needless_range_loop",
"clippy::needless_return",
+ "clippy::needless_return_with_question_mark",
"clippy::neg_multiply",
"clippy::new_ret_no_self",
"clippy::new_without_default",
+ "clippy::non_minimal_cfg",
+ "clippy::obfuscated_if_else",
"clippy::ok_expect",
"clippy::op_ref",
"clippy::option_map_or_none",
+ "clippy::partialeq_to_none",
"clippy::print_literal",
"clippy::print_with_newline",
"clippy::println_empty_string",
@@ -7622,14 +13933,15 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
"clippy::tabs_in_doc_comments",
"clippy::to_digit_is_some",
"clippy::toplevel_ref_arg",
- "clippy::try_err",
+ "clippy::trim_split_whitespace",
"clippy::unnecessary_fold",
"clippy::unnecessary_lazy_evaluations",
"clippy::unnecessary_mut_passed",
+ "clippy::unnecessary_owned_empty_strings",
"clippy::unsafe_removed_from_name",
"clippy::unused_unit",
"clippy::unusual_byte_groupings",
- "clippy::unwrap_or_else_default",
+ "clippy::unwrap_or_default",
"clippy::upper_case_acronyms",
"clippy::while_let_on_iterator",
"clippy::write_literal",
@@ -7642,25 +13954,58 @@ pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &[
LintGroup {
lint: Lint {
label: "clippy::suspicious",
- description: r##"lint group for: clippy::blanket_clippy_restriction_lints, clippy::empty_loop, clippy::eval_order_dependence, clippy::float_equality_without_abs, clippy::for_loops_over_fallibles, clippy::misrefactored_assign_op, clippy::mut_range_bound, clippy::mutable_key_type, clippy::octal_escapes, clippy::return_self_not_must_use, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_unary_op_formatting"##,
+ description: r##"lint group for: clippy::almost_complete_range, clippy::arc_with_non_send_sync, clippy::await_holding_invalid_type, clippy::await_holding_lock, clippy::await_holding_refcell_ref, clippy::blanket_clippy_restriction_lints, clippy::cast_abs_to_unsigned, clippy::cast_enum_constructor, clippy::cast_enum_truncation, clippy::cast_nan_to_int, clippy::cast_slice_from_raw_parts, clippy::crate_in_macro_def, clippy::drop_non_drop, clippy::duplicate_mod, clippy::empty_loop, clippy::float_equality_without_abs, clippy::forget_non_drop, clippy::four_forward_slashes, clippy::from_raw_with_void_ptr, clippy::iter_out_of_bounds, clippy::let_underscore_future, clippy::lines_filter_map_ok, clippy::maybe_misused_cfg, clippy::misnamed_getters, clippy::misrefactored_assign_op, clippy::multi_assignments, clippy::mut_range_bound, clippy::mutable_key_type, clippy::no_effect_replace, clippy::non_canonical_clone_impl, clippy::non_canonical_partial_ord_impl, clippy::octal_escapes, clippy::path_ends_with_ext, clippy::permissions_set_readonly_false, clippy::print_in_format_impl, clippy::rc_clone_in_vec_init, clippy::single_range_in_vec_init, clippy::size_of_ref, clippy::suspicious_arithmetic_impl, clippy::suspicious_assignment_formatting, clippy::suspicious_command_arg_space, clippy::suspicious_doc_comments, clippy::suspicious_else_formatting, clippy::suspicious_map, clippy::suspicious_op_assign_impl, clippy::suspicious_to_owned, clippy::suspicious_unary_op_formatting, clippy::swap_ptr_to_ref, clippy::type_id_on_box"##,
},
children: &[
+ "clippy::almost_complete_range",
+ "clippy::arc_with_non_send_sync",
+ "clippy::await_holding_invalid_type",
+ "clippy::await_holding_lock",
+ "clippy::await_holding_refcell_ref",
"clippy::blanket_clippy_restriction_lints",
+ "clippy::cast_abs_to_unsigned",
+ "clippy::cast_enum_constructor",
+ "clippy::cast_enum_truncation",
+ "clippy::cast_nan_to_int",
+ "clippy::cast_slice_from_raw_parts",
+ "clippy::crate_in_macro_def",
+ "clippy::drop_non_drop",
+ "clippy::duplicate_mod",
"clippy::empty_loop",
- "clippy::eval_order_dependence",
"clippy::float_equality_without_abs",
- "clippy::for_loops_over_fallibles",
+ "clippy::forget_non_drop",
+ "clippy::four_forward_slashes",
+ "clippy::from_raw_with_void_ptr",
+ "clippy::iter_out_of_bounds",
+ "clippy::let_underscore_future",
+ "clippy::lines_filter_map_ok",
+ "clippy::maybe_misused_cfg",
+ "clippy::misnamed_getters",
"clippy::misrefactored_assign_op",
+ "clippy::multi_assignments",
"clippy::mut_range_bound",
"clippy::mutable_key_type",
+ "clippy::no_effect_replace",
+ "clippy::non_canonical_clone_impl",
+ "clippy::non_canonical_partial_ord_impl",
"clippy::octal_escapes",
- "clippy::return_self_not_must_use",
+ "clippy::path_ends_with_ext",
+ "clippy::permissions_set_readonly_false",
+ "clippy::print_in_format_impl",
+ "clippy::rc_clone_in_vec_init",
+ "clippy::single_range_in_vec_init",
+ "clippy::size_of_ref",
"clippy::suspicious_arithmetic_impl",
"clippy::suspicious_assignment_formatting",
+ "clippy::suspicious_command_arg_space",
+ "clippy::suspicious_doc_comments",
"clippy::suspicious_else_formatting",
"clippy::suspicious_map",
"clippy::suspicious_op_assign_impl",
+ "clippy::suspicious_to_owned",
"clippy::suspicious_unary_op_formatting",
+ "clippy::swap_ptr_to_ref",
+ "clippy::type_id_on_box",
],
},
];
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
index 330af442f..9363bdfa1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -3,7 +3,7 @@
use std::collections::VecDeque;
use base_db::{FileId, SourceDatabaseExt};
-use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics};
+use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics};
use syntax::{
ast::{self, make},
AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
@@ -117,7 +117,7 @@ pub fn get_definition(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
) -> Option<Definition> {
- for token in sema.descend_into_macros(token, 0.into()) {
+ for token in sema.descend_into_macros(DescendPreference::None, token) {
let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
if let Some(&[x]) = def.as_deref() {
return Some(x);
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
index e475c5cd6..a4f0a6df7 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -1,14 +1,14 @@
//! Look up accessible paths for items.
+
use hir::{
- AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef,
+ AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, ModPath, Module, ModuleDef, Name,
PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type,
};
-use itertools::Itertools;
-use rustc_hash::FxHashSet;
+use itertools::{EitherOrBoth, Itertools};
+use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{self, make, HasName},
- utils::path_to_string_stripping_turbo_fish,
- AstNode, SyntaxNode,
+ AstNode, SmolStr, SyntaxNode,
};
use crate::{
@@ -51,39 +51,39 @@ pub struct TraitImportCandidate {
#[derive(Debug)]
pub struct PathImportCandidate {
/// Optional qualifier before name.
- pub qualifier: Option<FirstSegmentUnresolved>,
+ pub qualifier: Option<Vec<SmolStr>>,
/// The name the item (struct, trait, enum, etc.) should have.
pub name: NameToImport,
}
-/// A qualifier that has a first segment and it's unresolved.
-#[derive(Debug)]
-pub struct FirstSegmentUnresolved {
- fist_segment: ast::NameRef,
- full_qualifier: ast::Path,
-}
-
/// A name that will be used during item lookups.
#[derive(Debug, Clone)]
pub enum NameToImport {
/// Requires items with names that exactly match the given string, bool indicates case-sensitivity.
Exact(String, bool),
- /// Requires items with names that case-insensitively contain all letters from the string,
+ /// Requires items with names that match the given string by prefix, bool indicates case-sensitivity.
+ Prefix(String, bool),
+ /// Requires items with names contain all letters from the string,
/// in the same order, but not necessary adjacent.
- Fuzzy(String),
+ Fuzzy(String, bool),
}
impl NameToImport {
pub fn exact_case_sensitive(s: String) -> NameToImport {
NameToImport::Exact(s, true)
}
-}
-impl NameToImport {
+ pub fn fuzzy(s: String) -> NameToImport {
+ // unless all chars are lowercase, we do a case sensitive search
+ let case_sensitive = s.chars().any(|c| c.is_uppercase());
+ NameToImport::Fuzzy(s, case_sensitive)
+ }
+
pub fn text(&self) -> &str {
match self {
- NameToImport::Exact(text, _) => text.as_str(),
- NameToImport::Fuzzy(text) => text.as_str(),
+ NameToImport::Prefix(text, _)
+ | NameToImport::Exact(text, _)
+ | NameToImport::Fuzzy(text, _) => text.as_str(),
}
}
}
@@ -165,7 +165,7 @@ impl ImportAssets {
Some(Self {
import_candidate: ImportCandidate::TraitMethod(TraitImportCandidate {
receiver_ty,
- assoc_item_name: NameToImport::Fuzzy(fuzzy_method_name),
+ assoc_item_name: NameToImport::fuzzy(fuzzy_method_name),
}),
module_with_candidate: module_with_method_call,
candidate_node,
@@ -188,18 +188,11 @@ pub struct LocatedImport {
/// the original item is the associated constant, but the import has to be a trait that
/// defines this constant.
pub original_item: ItemInNs,
- /// A path of the original item.
- pub original_path: Option<ModPath>,
}
impl LocatedImport {
- pub fn new(
- import_path: ModPath,
- item_to_import: ItemInNs,
- original_item: ItemInNs,
- original_path: Option<ModPath>,
- ) -> Self {
- Self { import_path, item_to_import, original_item, original_path }
+ pub fn new(import_path: ModPath, item_to_import: ItemInNs, original_item: ItemInNs) -> Self {
+ Self { import_path, item_to_import, original_item }
}
}
@@ -213,9 +206,10 @@ impl ImportAssets {
sema: &Semantics<'_, RootDatabase>,
prefix_kind: PrefixKind,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_imports");
- self.search_for(sema, Some(prefix_kind), prefer_no_std)
+ self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude)
}
/// This may return non-absolute paths if a part of the returned path is already imported into scope.
@@ -223,17 +217,36 @@ impl ImportAssets {
&self,
sema: &Semantics<'_, RootDatabase>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for_relative_paths");
- self.search_for(sema, None, prefer_no_std)
+ self.search_for(sema, None, prefer_no_std, prefer_prelude)
}
- pub fn path_fuzzy_name_to_exact(&mut self, case_sensitive: bool) {
+ /// Requires imports to by prefix instead of fuzzily.
+ pub fn path_fuzzy_name_to_prefix(&mut self) {
if let ImportCandidate::Path(PathImportCandidate { name: to_import, .. }) =
&mut self.import_candidate
{
- let name = match to_import {
- NameToImport::Fuzzy(name) => std::mem::take(name),
+ let (name, case_sensitive) = match to_import {
+ NameToImport::Fuzzy(name, case_sensitive) => {
+ (std::mem::take(name), *case_sensitive)
+ }
+ _ => return,
+ };
+ *to_import = NameToImport::Prefix(name, case_sensitive);
+ }
+ }
+
+ /// Requires imports to match exactly instead of fuzzily.
+ pub fn path_fuzzy_name_to_exact(&mut self) {
+ if let ImportCandidate::Path(PathImportCandidate { name: to_import, .. }) =
+ &mut self.import_candidate
+ {
+ let (name, case_sensitive) = match to_import {
+ NameToImport::Fuzzy(name, case_sensitive) => {
+ (std::mem::take(name), *case_sensitive)
+ }
_ => return,
};
*to_import = NameToImport::Exact(name, case_sensitive);
@@ -245,6 +258,7 @@ impl ImportAssets {
sema: &Semantics<'_, RootDatabase>,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Vec<LocatedImport> {
let _p = profile::span("import_assets::search_for");
@@ -256,6 +270,7 @@ impl ImportAssets {
&self.module_with_candidate,
prefixed,
prefer_no_std,
+ prefer_prelude,
)
};
@@ -322,64 +337,75 @@ fn path_applicable_imports(
)
.filter_map(|item| {
let mod_path = mod_path(item)?;
- Some(LocatedImport::new(mod_path.clone(), item, item, Some(mod_path)))
- })
- .collect()
- }
- Some(first_segment_unresolved) => {
- let unresolved_qualifier =
- path_to_string_stripping_turbo_fish(&first_segment_unresolved.full_qualifier);
- let unresolved_first_segment = first_segment_unresolved.fist_segment.text();
- items_locator::items_with_name(
- sema,
- current_crate,
- path_candidate.name.clone(),
- AssocSearchMode::Include,
- Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
- )
- .filter_map(|item| {
- import_for_item(
- sema.db,
- mod_path,
- &unresolved_first_segment,
- &unresolved_qualifier,
- item,
- )
+ Some(LocatedImport::new(mod_path, item, item))
})
.collect()
}
+ Some(qualifier) => items_locator::items_with_name(
+ sema,
+ current_crate,
+ path_candidate.name.clone(),
+ AssocSearchMode::Include,
+ Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item))
+ .collect(),
}
}
fn import_for_item(
db: &RootDatabase,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
- unresolved_first_segment: &str,
- unresolved_qualifier: &str,
+ unresolved_qualifier: &[SmolStr],
original_item: ItemInNs,
) -> Option<LocatedImport> {
let _p = profile::span("import_assets::import_for_item");
+ let [first_segment, ..] = unresolved_qualifier else { return None };
- let original_item_candidate = item_for_path_search(db, original_item)?;
+ let item_as_assoc = item_as_assoc(db, original_item);
+
+ let (original_item_candidate, trait_item_to_import) = match item_as_assoc {
+ Some(assoc_item) => match assoc_item.container(db) {
+ AssocItemContainer::Trait(trait_) => {
+ let trait_ = ItemInNs::from(ModuleDef::from(trait_));
+ (trait_, Some(trait_))
+ }
+ AssocItemContainer::Impl(impl_) => {
+ (ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?)), None)
+ }
+ },
+ None => (original_item, None),
+ };
let import_path_candidate = mod_path(original_item_candidate)?;
- let import_path_string = import_path_candidate.display(db).to_string();
- let expected_import_end = if item_as_assoc(db, original_item).is_some() {
- unresolved_qualifier.to_string()
- } else {
- format!("{unresolved_qualifier}::{}", item_name(db, original_item)?.display(db))
+ let mut import_path_candidate_segments = import_path_candidate.segments().iter().rev();
+ let predicate = |it: EitherOrBoth<&SmolStr, &Name>| match it {
+ // segments match, check next one
+ EitherOrBoth::Both(a, b) if b.as_str() == Some(&**a) => None,
+ // segments mismatch / qualifier is longer than the path, bail out
+ EitherOrBoth::Both(..) | EitherOrBoth::Left(_) => Some(false),
+ // all segments match and we have exhausted the qualifier, proceed
+ EitherOrBoth::Right(_) => Some(true),
};
- if !import_path_string.contains(unresolved_first_segment)
- || !import_path_string.ends_with(&expected_import_end)
- {
+ if item_as_assoc.is_none() {
+ let item_name = item_name(db, original_item)?.as_text()?;
+ let last_segment = import_path_candidate_segments.next()?;
+ if last_segment.as_str() != Some(&*item_name) {
+ return None;
+ }
+ }
+ let ends_with = unresolved_qualifier
+ .iter()
+ .rev()
+ .zip_longest(import_path_candidate_segments)
+ .find_map(predicate)
+ .unwrap_or(true);
+ if !ends_with {
return None;
}
- let segment_import =
- find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
- let trait_item_to_import = item_as_assoc(db, original_item)
- .and_then(|assoc| assoc.containing_trait(db))
- .map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
+ let segment_import = find_import_for_segment(db, original_item_candidate, first_segment)?;
+
Some(match (segment_import == original_item_candidate, trait_item_to_import) {
(true, Some(_)) => {
// FIXME we should be able to import both the trait and the segment,
@@ -387,42 +413,37 @@ fn import_for_item(
// especially in case of lazy completion edit resolutions.
return None;
}
- (false, Some(trait_to_import)) => LocatedImport::new(
- mod_path(trait_to_import)?,
- trait_to_import,
- original_item,
- mod_path(original_item),
- ),
- (true, None) => LocatedImport::new(
- import_path_candidate,
- original_item_candidate,
- original_item,
- mod_path(original_item),
- ),
- (false, None) => LocatedImport::new(
- mod_path(segment_import)?,
- segment_import,
- original_item,
- mod_path(original_item),
- ),
+ (false, Some(trait_to_import)) => {
+ LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item)
+ }
+ (true, None) => {
+ LocatedImport::new(import_path_candidate, original_item_candidate, original_item)
+ }
+ (false, None) => {
+ LocatedImport::new(mod_path(segment_import)?, segment_import, original_item)
+ }
})
}
pub fn item_for_path_search(db: &RootDatabase, item: ItemInNs) -> Option<ItemInNs> {
Some(match item {
ItemInNs::Types(_) | ItemInNs::Values(_) => match item_as_assoc(db, item) {
- Some(assoc_item) => match assoc_item.container(db) {
- AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
- AssocItemContainer::Impl(impl_) => {
- ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
- }
- },
+ Some(assoc_item) => item_for_path_search_assoc(db, assoc_item)?,
None => item,
},
ItemInNs::Macros(_) => item,
})
}
+fn item_for_path_search_assoc(db: &RootDatabase, assoc_item: AssocItem) -> Option<ItemInNs> {
+ Some(match assoc_item.container(db) {
+ AssocItemContainer::Trait(trait_) => ItemInNs::from(ModuleDef::from(trait_)),
+ AssocItemContainer::Impl(impl_) => {
+ ItemInNs::from(ModuleDef::from(impl_.self_ty(db).as_adt()?))
+ }
+ })
+}
+
fn find_import_for_segment(
db: &RootDatabase,
original_item: ItemInNs,
@@ -499,6 +520,7 @@ fn trait_applicable_items(
.collect();
let mut located_imports = FxHashSet::default();
+ let mut trait_import_paths = FxHashMap::default();
if trait_assoc_item {
trait_candidate.receiver_ty.iterate_path_candidates(
@@ -516,12 +538,14 @@ fn trait_applicable_items(
}
let located_trait = assoc.containing_trait(db)?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
- let original_item = assoc_to_item(assoc);
+ let import_path = trait_import_paths
+ .entry(trait_item)
+ .or_insert_with(|| mod_path(trait_item))
+ .clone()?;
located_imports.insert(LocatedImport::new(
- mod_path(trait_item)?,
+ import_path,
trait_item,
- original_item,
- mod_path(original_item),
+ assoc_to_item(assoc),
));
}
None::<()>
@@ -539,12 +563,14 @@ fn trait_applicable_items(
if required_assoc_items.contains(&assoc) {
let located_trait = assoc.containing_trait(db)?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
- let original_item = assoc_to_item(assoc);
+ let import_path = trait_import_paths
+ .entry(trait_item)
+ .or_insert_with(|| mod_path(trait_item))
+ .clone()?;
located_imports.insert(LocatedImport::new(
- mod_path(trait_item)?,
+ import_path,
trait_item,
- original_item,
- mod_path(original_item),
+ assoc_to_item(assoc),
));
}
None::<()>
@@ -569,11 +595,18 @@ fn get_mod_path(
module_with_candidate: &Module,
prefixed: Option<PrefixKind>,
prefer_no_std: bool,
+ prefer_prelude: bool,
) -> Option<ModPath> {
if let Some(prefix_kind) = prefixed {
- module_with_candidate.find_use_path_prefixed(db, item_to_search, prefix_kind, prefer_no_std)
+ module_with_candidate.find_use_path_prefixed(
+ db,
+ item_to_search,
+ prefix_kind,
+ prefer_no_std,
+ prefer_prelude,
+ )
} else {
- module_with_candidate.find_use_path(db, item_to_search, prefer_no_std)
+ module_with_candidate.find_use_path(db, item_to_search, prefer_no_std, prefer_prelude)
}
}
@@ -623,7 +656,7 @@ impl ImportCandidate {
fuzzy_name: String,
sema: &Semantics<'_, RootDatabase>,
) -> Option<Self> {
- path_import_candidate(sema, qualifier, NameToImport::Fuzzy(fuzzy_name))
+ path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name))
}
}
@@ -635,18 +668,13 @@ fn path_import_candidate(
Some(match qualifier {
Some(qualifier) => match sema.resolve_path(&qualifier) {
None => {
- let qualifier_start =
- qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
- let qualifier_start_path =
- qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
- if sema.resolve_path(&qualifier_start_path).is_none() {
- ImportCandidate::Path(PathImportCandidate {
- qualifier: Some(FirstSegmentUnresolved {
- fist_segment: qualifier_start,
- full_qualifier: qualifier,
- }),
- name,
- })
+ if qualifier.first_qualifier().map_or(true, |it| sema.resolve_path(&it).is_none()) {
+ let mut qualifier = qualifier
+ .segments_of_this_path_only_rev()
+ .map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text())))
+ .collect::<Option<Vec<_>>>()?;
+ qualifier.reverse();
+ ImportCandidate::Path(PathImportCandidate { qualifier: Some(qualifier), name })
} else {
return None;
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
index 9be1d3663..a0cfd3836 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs
@@ -9,7 +9,7 @@ use syntax::{
algo,
ast::{
self, edit_in_place::Removable, make, AstNode, HasAttrs, HasModuleItem, HasVisibility,
- PathSegmentKind,
+ PathSegmentKind, UseTree,
},
ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode,
};
@@ -157,6 +157,29 @@ impl ImportScope {
/// Insert an import path into the given file/node. A `merge` value of none indicates that no import merging is allowed to occur.
pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
+ insert_use_with_alias_option(scope, path, cfg, None);
+}
+
+pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
+ let text: &str = "use foo as _";
+ let parse = syntax::SourceFile::parse(text);
+ let node = parse
+ .tree()
+ .syntax()
+ .descendants()
+ .find_map(UseTree::cast)
+ .expect("Failed to make ast node `Rename`");
+ let alias = node.rename();
+
+ insert_use_with_alias_option(scope, path, cfg, alias);
+}
+
+fn insert_use_with_alias_option(
+ scope: &ImportScope,
+ path: ast::Path,
+ cfg: &InsertUseConfig,
+ alias: Option<ast::Rename>,
+) {
let _p = profile::span("insert_use");
let mut mb = match cfg.granularity {
ImportGranularity::Crate => Some(MergeBehavior::Crate),
@@ -176,7 +199,8 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
}
let use_item =
- make::use_(None, make::use_tree(path.clone(), None, None, false)).clone_for_update();
+ make::use_(None, make::use_tree(path.clone(), None, alias, false)).clone_for_update();
+
// merge into existing imports if possible
if let Some(mb) = mb {
let filter = |it: &_| !(cfg.skip_glob_imports && ast::Use::is_simple_glob(it));
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
index b92e367f7..01d2f1970 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
@@ -993,6 +993,46 @@ use foo::bar::qux;
);
}
+#[test]
+fn insert_with_renamed_import_simple_use() {
+ check_with_config(
+ "use self::foo::Foo",
+ r#"
+use self::foo::Foo as _;
+"#,
+ r#"
+use self::foo::Foo;
+"#,
+ &InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::BySelf,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ );
+}
+
+#[test]
+fn insert_with_renamed_import_complex_use() {
+ check_with_config(
+ "use self::foo::Foo;",
+ r#"
+use self::foo::{self, Foo as _, Bar};
+"#,
+ r#"
+use self::foo::{self, Foo, Bar};
+"#,
+ &InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::BySelf,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ );
+}
+
fn check_with_config(
path: &str,
ra_fixture_before: &str,
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
index 27b6321f3..ff84e9ffa 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
@@ -78,6 +78,10 @@ fn try_merge_trees_mut(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehav
{
lhs.split_prefix(&lhs_prefix);
rhs.split_prefix(&rhs_prefix);
+ } else {
+ ted::replace(lhs.syntax(), rhs.syntax());
+ // we can safely return here, in this case `recursive_merge` doesn't do anything
+ return Some(());
}
recursive_merge(lhs, rhs, merge)
}
@@ -123,6 +127,13 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
// so they need to be handled explicitly
.or_else(|| tree.star_token().map(|_| false))
};
+
+ if lhs_t.rename().and_then(|x| x.underscore_token()).is_some() {
+ ted::replace(lhs_t.syntax(), rhs_t.syntax());
+ *lhs_t = rhs_t;
+ continue;
+ }
+
match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
(Some(true), None) => continue,
(None, Some(true)) => {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
index 3f7a3ec2d..4a5d234f7 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
@@ -3,13 +3,13 @@
//! The main reason for this module to exist is the fact that project's items and dependencies' items
//! are located in different caches, with different APIs.
use either::Either;
-use hir::{import_map, AsAssocItem, Crate, ItemInNs, Semantics};
+use hir::{import_map, Crate, ItemInNs, Semantics};
use limit::Limit;
use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
/// A value to use, when uncertain which limit to pick.
-pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
+pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
pub use import_map::AssocSearchMode;
@@ -31,26 +31,36 @@ pub fn items_with_name<'a>(
)
});
+ let prefix = matches!(name, NameToImport::Prefix(..));
let (mut local_query, mut external_query) = match name {
- NameToImport::Exact(exact_name, case_sensitive) => {
+ NameToImport::Prefix(exact_name, case_sensitive)
+ | NameToImport::Exact(exact_name, case_sensitive) => {
let mut local_query = symbol_index::Query::new(exact_name.clone());
- local_query.exact();
-
- let external_query = import_map::Query::new(exact_name);
-
- (
- local_query,
- if case_sensitive { external_query.case_sensitive() } else { external_query },
- )
+ let mut external_query =
+ // import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search);
+ import_map::Query::new(exact_name);
+ if prefix {
+ local_query.prefix();
+ external_query = external_query.prefix();
+ } else {
+ local_query.exact();
+ external_query = external_query.exact();
+ }
+ if case_sensitive {
+ local_query.case_sensitive();
+ external_query = external_query.case_sensitive();
+ }
+ (local_query, external_query)
}
- NameToImport::Fuzzy(fuzzy_search_string) => {
+ NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => {
let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
+ local_query.fuzzy();
let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
.fuzzy()
.assoc_search_mode(assoc_item_search);
- if fuzzy_search_string.to_lowercase() != fuzzy_search_string {
+ if case_sensitive {
local_query.case_sensitive();
external_query = external_query.case_sensitive();
}
@@ -93,8 +103,8 @@ fn find_items<'a>(
.into_iter()
.filter(move |candidate| match assoc_item_search {
AssocSearchMode::Include => true,
- AssocSearchMode::Exclude => candidate.def.as_assoc_item(db).is_none(),
- AssocSearchMode::AssocItemsOnly => candidate.def.as_assoc_item(db).is_some(),
+ AssocSearchMode::Exclude => !candidate.is_assoc,
+ AssocSearchMode::AssocItemsOnly => candidate.is_assoc,
})
.map(|local_candidate| match local_candidate.def {
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index 226def4d5..fefc05e53 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -2,7 +2,7 @@
//!
//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod apply_change;
@@ -144,6 +144,7 @@ impl RootDatabase {
db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
db.update_parse_query_lru_capacity(lru_capacity);
+ db.setup_syntax_context_root();
db
}
@@ -156,7 +157,6 @@ impl RootDatabase {
base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
// macro expansions are usually rather small, so we can afford to keep more of them alive
hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
- hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
}
pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) {
@@ -174,12 +174,6 @@ impl RootDatabase {
.copied()
.unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
);
- hir_db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(
- lru_capacities
- .get(stringify!(MacroExpandQuery))
- .copied()
- .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
- );
macro_rules! update_lru_capacity_per_query {
($( $module:ident :: $query:ident )*) => {$(
@@ -204,11 +198,10 @@ impl RootDatabase {
hir_db::AstIdMapQuery
// hir_db::ParseMacroExpansionQuery
// hir_db::InternMacroCallQuery
- hir_db::MacroArgNodeQuery
+ hir_db::MacroArgQuery
hir_db::DeclMacroExpanderQuery
// hir_db::MacroExpandQuery
hir_db::ExpandProcMacroQuery
- hir_db::HygieneFrameQuery
hir_db::ParseMacroExpansionErrorQuery
// DefDatabase
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
index fb75b5b45..fb4c0c126 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -2,7 +2,7 @@
use crate::helpers::mod_path_to_ast;
use either::Either;
-use hir::{AsAssocItem, HirDisplay, SemanticsScope};
+use hir::{AsAssocItem, HirDisplay, ModuleDef, SemanticsScope};
use rustc_hash::FxHashMap;
use syntax::{
ast::{self, make, AstNode},
@@ -183,6 +183,7 @@ impl<'a> PathTransform<'a> {
lifetime_substs,
target_module,
source_scope: self.source_scope,
+ same_self_type: self.target_scope.has_same_self_type(self.source_scope),
};
ctx.transform_default_values(defaulted_params);
ctx
@@ -195,6 +196,7 @@ struct Ctx<'a> {
lifetime_substs: FxHashMap<LifetimeName, ast::Lifetime>,
target_module: hir::Module,
source_scope: &'a SemanticsScope<'a>,
+ same_self_type: bool,
}
fn postorder(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> {
@@ -277,6 +279,7 @@ impl Ctx<'_> {
self.source_scope.db.upcast(),
hir::ModuleDef::Trait(trait_ref),
false,
+ true,
)?;
match make::ty_path(mod_path_to_ast(&found_path)) {
ast::Type::PathType(path_ty) => Some(path_ty),
@@ -311,8 +314,12 @@ impl Ctx<'_> {
}
}
- let found_path =
- self.target_module.find_use_path(self.source_scope.db.upcast(), def, false)?;
+ let found_path = self.target_module.find_use_path(
+ self.source_scope.db.upcast(),
+ def,
+ false,
+ true,
+ )?;
let res = mod_path_to_ast(&found_path).clone_for_update();
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() {
@@ -327,8 +334,42 @@ impl Ctx<'_> {
ted::replace(path.syntax(), subst.clone_subtree().clone_for_update());
}
}
+ hir::PathResolution::SelfType(imp) => {
+ // keep Self type if it does not need to be replaced
+ if self.same_self_type {
+ return None;
+ }
+
+ let ty = imp.self_ty(self.source_scope.db);
+ let ty_str = &ty
+ .display_source_code(
+ self.source_scope.db,
+ self.source_scope.module().into(),
+ true,
+ )
+ .ok()?;
+ let ast_ty = make::ty(&ty_str).clone_for_update();
+
+ if let Some(adt) = ty.as_adt() {
+ if let ast::Type::PathType(path_ty) = &ast_ty {
+ let found_path = self.target_module.find_use_path(
+ self.source_scope.db.upcast(),
+ ModuleDef::from(adt),
+ false,
+ true,
+ )?;
+
+ if let Some(qual) = mod_path_to_ast(&found_path).qualifier() {
+ let res = make::path_concat(qual, path_ty.path()?).clone_for_update();
+ ted::replace(path.syntax(), res.syntax());
+ return Some(());
+ }
+ }
+ }
+
+ ted::replace(path.syntax(), ast_ty.syntax());
+ }
hir::PathResolution::Local(_)
- | hir::PathResolution::SelfType(_)
| hir::PathResolution::Def(_)
| hir::PathResolution::BuiltinAttr(_)
| hir::PathResolution::ToolModule(_)
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 353a9749a..d2b6a7326 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -22,10 +22,10 @@
//! Our current behavior is ¯\_(ツ)_/¯.
use std::fmt;
-use base_db::{AnchoredPathBuf, FileId, FileRange};
+use base_db::{span::SyntaxContextId, AnchoredPathBuf, FileId, FileRange};
use either::Either;
-use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics};
-use stdx::never;
+use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics};
+use stdx::{never, TupleExt};
use syntax::{
ast::{self, HasName},
AstNode, SyntaxKind, TextRange, T,
@@ -34,7 +34,7 @@ use text_edit::{TextEdit, TextEditBuilder};
use crate::{
defs::Definition,
- search::FileReference,
+ search::{FileReference, FileReferenceNode},
source_change::{FileSystemEdit, SourceChange},
syntax_helpers::node_ext::expr_as_name_ref,
traits::convert_to_def_in_trait,
@@ -103,6 +103,7 @@ impl Definition {
/// renamed and extern crate names will report its range, though a rename will introduce
/// an alias instead.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
+ let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then(|| range);
let res = match self {
Definition::Macro(mac) => {
let src = mac.source(sema.db)?;
@@ -110,14 +111,18 @@ impl Definition {
Either::Left(it) => it.name()?,
Either::Right(it) => it.name()?,
};
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
Definition::Field(field) => {
let src = field.source(sema.db)?;
match &src.value {
FieldSource::Named(record_field) => {
let name = record_field.name()?;
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
FieldSource::Pos(_) => None,
}
@@ -125,25 +130,31 @@ impl Definition {
Definition::Module(module) => {
let src = module.declaration_source(sema.db)?;
let name = src.value.name()?;
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
- Definition::Function(it) => name_range(it, sema),
+ Definition::Function(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Adt(adt) => match adt {
- hir::Adt::Struct(it) => name_range(it, sema),
- hir::Adt::Union(it) => name_range(it, sema),
- hir::Adt::Enum(it) => name_range(it, sema),
+ hir::Adt::Struct(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ hir::Adt::Union(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ hir::Adt::Enum(it) => name_range(it, sema).and_then(syn_ctx_is_root),
},
- Definition::Variant(it) => name_range(it, sema),
- Definition::Const(it) => name_range(it, sema),
- Definition::Static(it) => name_range(it, sema),
- Definition::Trait(it) => name_range(it, sema),
- Definition::TraitAlias(it) => name_range(it, sema),
- Definition::TypeAlias(it) => name_range(it, sema),
- Definition::Local(it) => name_range(it.primary_source(sema.db), sema),
+ Definition::Variant(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::Const(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::Static(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::Trait(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::TraitAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::TypeAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root),
+ Definition::Local(it) => {
+ name_range(it.primary_source(sema.db), sema).and_then(syn_ctx_is_root)
+ }
Definition::GenericParam(generic_param) => match generic_param {
hir::GenericParam::LifetimeParam(lifetime_param) => {
let src = lifetime_param.source(sema.db)?;
- src.with_value(src.value.lifetime()?.syntax()).original_file_range_opt(sema.db)
+ src.with_value(src.value.lifetime()?.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
_ => {
let x = match generic_param {
@@ -156,22 +167,30 @@ impl Definition {
Either::Left(x) => x.name()?,
Either::Right(_) => return None,
};
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
},
Definition::Label(label) => {
let src = label.source(sema.db);
let lifetime = src.value.lifetime()?;
- src.with_value(lifetime.syntax()).original_file_range_opt(sema.db)
+ src.with_value(lifetime.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
Definition::ExternCrateDecl(it) => {
let src = it.source(sema.db)?;
if let Some(rename) = src.value.rename() {
let name = rename.name()?;
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
} else {
let name = src.value.name_ref()?;
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ src.with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .and_then(syn_ctx_is_root)
}
}
Definition::BuiltinType(_) => return None,
@@ -183,7 +202,10 @@ impl Definition {
};
return res;
- fn name_range<D>(def: D, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange>
+ fn name_range<D>(
+ def: D,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> Option<(FileRange, SyntaxContextId)>
where
D: HasSource,
D::Ast: ast::HasName,
@@ -256,8 +278,10 @@ fn rename_mod(
let file_id = src.file_id.original_file(sema.db);
match src.value.name() {
Some(name) => {
- if let Some(file_range) =
- src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ if let Some(file_range) = src
+ .with_value(name.syntax())
+ .original_file_range_opt(sema.db)
+ .map(TupleExt::head)
{
source_change.insert_source_edit(
file_id,
@@ -337,7 +361,7 @@ pub fn source_edit_from_references(
// macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
let mut edited_ranges = Vec::new();
for &FileReference { range, ref name, .. } in references {
- let name_range = name.syntax().text_range();
+ let name_range = name.text_range();
if name_range.len() != range.len() {
// This usage comes from a different token kind that was downmapped to a NameLike in a macro
// Renaming this will most likely break things syntax-wise
@@ -347,17 +371,17 @@ pub fn source_edit_from_references(
// if the ranges differ then the node is inside a macro call, we can't really attempt
// to make special rewrites like shorthand syntax and such, so just rename the node in
// the macro input
- ast::NameLike::NameRef(name_ref) if name_range == range => {
+ FileReferenceNode::NameRef(name_ref) if name_range == range => {
source_edit_from_name_ref(&mut edit, name_ref, new_name, def)
}
- ast::NameLike::Name(name) if name_range == range => {
+ FileReferenceNode::Name(name) if name_range == range => {
source_edit_from_name(&mut edit, name, new_name)
}
_ => false,
};
if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
let (range, new_name) = match name {
- ast::NameLike::Lifetime(_) => (
+ FileReferenceNode::Lifetime(_) => (
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
),
@@ -493,7 +517,12 @@ fn source_edit_from_def(
for source in local.sources(sema.db) {
let source = match source.source.clone().original_ast_node(sema.db) {
Some(source) => source,
- None => match source.source.syntax().original_file_range_opt(sema.db) {
+ None => match source
+ .source
+ .syntax()
+ .original_file_range_opt(sema.db)
+ .map(TupleExt::head)
+ {
Some(FileRange { file_id: file_id2, range }) => {
file_id = Some(file_id2);
edit.replace(range, new_name.to_owned());
@@ -504,7 +533,7 @@ fn source_edit_from_def(
}
},
};
- file_id = source.file_id.file_id();
+ file_id = Some(source.file_id);
if let Either::Left(pat) = source.value {
let name_range = pat.name().unwrap().syntax().text_range();
// special cases required for renaming fields/locals in Record patterns
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index 9c4f0ac8c..dbef36026 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -8,13 +8,14 @@ use std::mem;
use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{
- AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility,
+ AsAssocItem, DefWithBody, DescendPreference, HasAttrs, HasSource, HirFileIdExt, InFile,
+ InRealFile, ModuleSource, PathResolution, Semantics, Visibility,
};
use memchr::memmem::Finder;
use nohash_hasher::IntMap;
use once_cell::unsync::Lazy;
use parser::SyntaxKind;
-use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
+use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize};
use triomphe::Arc;
use crate::{
@@ -62,10 +63,67 @@ pub struct FileReference {
/// The range of the reference in the original file
pub range: TextRange,
/// The node of the reference in the (macro-)file
- pub name: ast::NameLike,
+ pub name: FileReferenceNode,
pub category: Option<ReferenceCategory>,
}
+#[derive(Debug, Clone)]
+pub enum FileReferenceNode {
+ Name(ast::Name),
+ NameRef(ast::NameRef),
+ Lifetime(ast::Lifetime),
+ FormatStringEntry(ast::String, TextRange),
+}
+
+impl FileReferenceNode {
+ pub fn text_range(&self) -> TextRange {
+ match self {
+ FileReferenceNode::Name(it) => it.syntax().text_range(),
+ FileReferenceNode::NameRef(it) => it.syntax().text_range(),
+ FileReferenceNode::Lifetime(it) => it.syntax().text_range(),
+ FileReferenceNode::FormatStringEntry(_, range) => *range,
+ }
+ }
+ pub fn syntax(&self) -> SyntaxElement {
+ match self {
+ FileReferenceNode::Name(it) => it.syntax().clone().into(),
+ FileReferenceNode::NameRef(it) => it.syntax().clone().into(),
+ FileReferenceNode::Lifetime(it) => it.syntax().clone().into(),
+ FileReferenceNode::FormatStringEntry(it, _) => it.syntax().clone().into(),
+ }
+ }
+ pub fn into_name_like(self) -> Option<ast::NameLike> {
+ match self {
+ FileReferenceNode::Name(it) => Some(ast::NameLike::Name(it)),
+ FileReferenceNode::NameRef(it) => Some(ast::NameLike::NameRef(it)),
+ FileReferenceNode::Lifetime(it) => Some(ast::NameLike::Lifetime(it)),
+ FileReferenceNode::FormatStringEntry(_, _) => None,
+ }
+ }
+ pub fn as_name_ref(&self) -> Option<&ast::NameRef> {
+ match self {
+ FileReferenceNode::NameRef(name_ref) => Some(name_ref),
+ _ => None,
+ }
+ }
+ pub fn as_lifetime(&self) -> Option<&ast::Lifetime> {
+ match self {
+ FileReferenceNode::Lifetime(lifetime) => Some(lifetime),
+ _ => None,
+ }
+ }
+ pub fn text(&self) -> syntax::TokenText<'_> {
+ match self {
+ FileReferenceNode::NameRef(name_ref) => name_ref.text(),
+ FileReferenceNode::Name(name) => name.text(),
+ FileReferenceNode::Lifetime(lifetime) => lifetime.text(),
+ FileReferenceNode::FormatStringEntry(it, range) => {
+ syntax::TokenText::borrowed(&it.text()[*range - it.syntax().text_range().start()])
+ }
+ }
+ }
+}
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ReferenceCategory {
// FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
@@ -132,7 +190,8 @@ impl SearchScope {
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
- if let Some((file_id, call_source)) = file_id.original_call_node(db) {
+ if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db)
+ {
(file_id, Some(call_source.text_range()))
} else {
(
@@ -465,7 +524,9 @@ impl<'a> FindUsages<'a> {
// every textual hit. That function is notoriously
// expensive even for things that do not get down mapped
// into macros.
- sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent())
+ sema.descend_into_macros(DescendPreference::None, token)
+ .into_iter()
+ .filter_map(|it| it.parent())
})
};
@@ -475,6 +536,17 @@ impl<'a> FindUsages<'a> {
// Search for occurrences of the items name
for offset in match_indices(&text, finder, search_range) {
+ tree.token_at_offset(offset).into_iter().for_each(|token| {
+ let Some(str_token) = ast::String::cast(token.clone()) else { return };
+ if let Some((range, nameres)) =
+ sema.check_for_format_args_template(token.clone(), offset)
+ {
+ if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
+ return;
+ }
+ }
+ });
+
for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) {
if match name {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@@ -584,12 +656,12 @@ impl<'a> FindUsages<'a> {
) -> bool {
match NameRefClass::classify(self.sema, name_ref) {
Some(NameRefClass::Definition(Definition::SelfType(impl_)))
- if impl_.self_ty(self.sema.db) == *self_ty =>
+ if impl_.self_ty(self.sema.db).as_adt() == self_ty.as_adt() =>
{
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: None,
};
sink(file_id, reference)
@@ -608,7 +680,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import),
};
sink(file_id, reference)
@@ -617,6 +689,27 @@ impl<'a> FindUsages<'a> {
}
}
+ fn found_format_args_ref(
+ &self,
+ file_id: FileId,
+ range: TextRange,
+ token: ast::String,
+ res: Option<PathResolution>,
+ sink: &mut dyn FnMut(FileId, FileReference) -> bool,
+ ) -> bool {
+ match res.map(Definition::from) {
+ Some(def) if def == self.def => {
+ let reference = FileReference {
+ range,
+ name: FileReferenceNode::FormatStringEntry(token, range),
+ category: Some(ReferenceCategory::Read),
+ };
+ sink(file_id, reference)
+ }
+ _ => false,
+ }
+ }
+
fn found_lifetime(
&self,
lifetime: &ast::Lifetime,
@@ -627,7 +720,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::Lifetime(lifetime.clone()),
+ name: FileReferenceNode::Lifetime(lifetime.clone()),
category: None,
};
sink(file_id, reference)
@@ -651,7 +744,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@@ -667,7 +760,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@@ -677,7 +770,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: ReferenceCategory::new(&def, name_ref),
};
sink(file_id, reference)
@@ -701,7 +794,7 @@ impl<'a> FindUsages<'a> {
};
let reference = FileReference {
range,
- name: ast::NameLike::NameRef(name_ref.clone()),
+ name: FileReferenceNode::NameRef(name_ref.clone()),
category: access,
};
sink(file_id, reference)
@@ -724,7 +817,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::Name(name.clone()),
+ name: FileReferenceNode::Name(name.clone()),
// FIXME: mutable patterns should have `Write` access
category: Some(ReferenceCategory::Read),
};
@@ -734,7 +827,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::Name(name.clone()),
+ name: FileReferenceNode::Name(name.clone()),
category: None,
};
sink(file_id, reference)
@@ -759,7 +852,7 @@ impl<'a> FindUsages<'a> {
let FileRange { file_id, range } = self.sema.original_range(name.syntax());
let reference = FileReference {
range,
- name: ast::NameLike::Name(name.clone()),
+ name: FileReferenceNode::Name(name.clone()),
category: None,
};
sink(file_id, reference)
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index 39763479c..c7188f1f7 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -140,10 +140,10 @@ impl SnippetEdit {
.with_position()
.map(|pos| {
let (snippet, index) = match pos {
- itertools::Position::First(it) | itertools::Position::Middle(it) => it,
+ (itertools::Position::First, it) | (itertools::Position::Middle, it) => it,
// last/only snippet gets index 0
- itertools::Position::Last((snippet, _))
- | itertools::Position::Only((snippet, _)) => (snippet, 0),
+ (itertools::Position::Last, (snippet, _))
+ | (itertools::Position::Only, (snippet, _)) => (snippet, 0),
};
let range = match snippet {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
index f699f999b..be8566b75 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -43,13 +43,20 @@ use triomphe::Arc;
use crate::RootDatabase;
-#[derive(Debug)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+enum SearchMode {
+ Fuzzy,
+ Exact,
+ Prefix,
+}
+
+#[derive(Debug, Clone)]
pub struct Query {
query: String,
lowercased: String,
only_types: bool,
libs: bool,
- exact: bool,
+ mode: SearchMode,
case_sensitive: bool,
limit: usize,
}
@@ -62,7 +69,7 @@ impl Query {
lowercased,
only_types: false,
libs: false,
- exact: false,
+ mode: SearchMode::Fuzzy,
case_sensitive: false,
limit: usize::max_value(),
}
@@ -76,8 +83,16 @@ impl Query {
self.libs = true;
}
+ pub fn fuzzy(&mut self) {
+ self.mode = SearchMode::Fuzzy;
+ }
+
pub fn exact(&mut self) {
- self.exact = true;
+ self.mode = SearchMode::Exact;
+ }
+
+ pub fn prefix(&mut self) {
+ self.mode = SearchMode::Prefix;
}
pub fn case_sensitive(&mut self) {
@@ -329,13 +344,23 @@ impl Query {
{
continue;
}
- if self.exact {
- if symbol.name != self.query {
- continue;
+ let skip = match self.mode {
+ SearchMode::Fuzzy => {
+ self.case_sensitive
+ && self.query.chars().any(|c| !symbol.name.contains(c))
}
- } else if self.case_sensitive
- && self.query.chars().any(|c| !symbol.name.contains(c))
- {
+ SearchMode::Exact => symbol.name != self.query,
+ SearchMode::Prefix if self.case_sensitive => {
+ !symbol.name.starts_with(&self.query)
+ }
+ SearchMode::Prefix => symbol
+ .name
+ .chars()
+ .zip(self.lowercased.chars())
+ .all(|(n, q)| n.to_lowercase().next() == Some(q)),
+ };
+
+ if skip {
continue;
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
index 7834c6603..7c01ac069 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -21,21 +21,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 83..119,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 109..118,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 109..118,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Struct",
@@ -50,21 +51,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "mul1",
@@ -79,21 +81,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "mul2",
@@ -108,21 +111,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s1",
@@ -137,21 +141,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s1",
@@ -166,21 +171,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 83..119,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 109..118,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 109..118,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
FileSymbol {
name: "s2",
@@ -195,21 +201,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..81,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 74..80,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 74..80,
+ },
+ ),
},
container_name: None,
is_alias: true,
+ is_assoc: false,
},
],
),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 87ad5844c..c9875c7f8 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -19,21 +19,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: TYPE_ALIAS,
range: 397..417,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 402..407,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 402..407,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "CONST",
@@ -46,21 +47,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: CONST,
range: 340..361,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 346..351,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 346..351,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "CONST_WITH_INNER",
@@ -73,21 +75,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: CONST,
range: 520..592,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 526..542,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 526..542,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Enum",
@@ -102,21 +105,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: ENUM,
range: 185..207,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 190..194,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 190..194,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ItemLikeMacro",
@@ -131,21 +135,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 654..676,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 663..676,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 663..676,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Macro",
@@ -160,21 +165,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MACRO_DEF,
range: 153..168,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 159..164,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 159..164,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "STATIC",
@@ -187,21 +193,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STATIC,
range: 362..396,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 369..375,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 369..375,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Struct",
@@ -216,21 +223,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 170..184,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 177..183,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 177..183,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructFromMacro",
@@ -245,23 +253,22 @@
),
loc: DeclarationLocation {
hir_file_id: MacroFile(
- MacroFile {
- macro_call_id: MacroCallId(
- 0,
- ),
- },
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..22,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 6..21,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 6..21,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInFn",
@@ -276,23 +283,24 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 318..336,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 325..335,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 325..335,
+ },
+ ),
},
container_name: Some(
"main",
),
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInNamedConst",
@@ -307,23 +315,24 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 555..581,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 562..580,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 562..580,
+ },
+ ),
},
container_name: Some(
"CONST_WITH_INNER",
),
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInUnnamedConst",
@@ -338,21 +347,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 479..507,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 486..506,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 486..506,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Trait",
@@ -365,21 +375,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: TRAIT,
range: 261..300,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 267..272,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 267..272,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Trait",
@@ -394,21 +405,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 682..696,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 691..696,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 691..696,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "Union",
@@ -423,21 +435,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: UNION,
range: 208..222,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 214..219,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 214..219,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "a_mod",
@@ -452,21 +465,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MODULE,
range: 419..457,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 423..428,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 423..428,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "b_mod",
@@ -481,21 +495,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MODULE,
range: 594..604,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 598..603,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 598..603,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "define_struct",
@@ -510,21 +525,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
range: 51..131,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 64..77,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 64..77,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "impl_fn",
@@ -537,21 +553,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: FN,
range: 242..257,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 245..252,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 245..252,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: true,
},
FileSymbol {
name: "macro_rules_macro",
@@ -566,21 +583,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
range: 1..48,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 14..31,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 14..31,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "main",
@@ -593,21 +611,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: FN,
range: 302..338,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 305..309,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 305..309,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "really_define_struct",
@@ -622,21 +641,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 611..648,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 628..648,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 628..648,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "trait_fn",
@@ -649,23 +669,24 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: FN,
range: 279..298,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 282..290,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 282..290,
+ },
+ ),
},
container_name: Some(
"Trait",
),
is_alias: false,
+ is_assoc: true,
},
],
),
@@ -691,21 +712,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 435..455,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 442..454,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 442..454,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
],
),
@@ -731,21 +753,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 111..143,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 127..143,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 127..143,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "StructInModB",
@@ -760,21 +783,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
range: 0..20,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 7..19,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 7..19,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "SuperItemLikeMacro",
@@ -789,21 +813,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 25..59,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 41..59,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 41..59,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ThisStruct",
@@ -818,21 +843,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 65..105,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 95..105,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 95..105,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
FileSymbol {
name: "ThisStruct",
@@ -847,21 +873,22 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
- FileId(
- 1,
- ),
+ 1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
range: 65..105,
},
- name_ptr: SyntaxNodePtr {
- kind: NAME,
- range: 95..105,
- },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 95..105,
+ },
+ ),
},
container_name: None,
is_alias: false,
+ is_assoc: false,
},
],
),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
index c7d5f3613..c8cf87d3c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/tests/sourcegen_lints.rs
@@ -1,4 +1,5 @@
-//! Generates descriptors structure for unstable feature from Unstable Book
+//! Generates descriptor structures for unstable features from the unstable book
+//! and lints from rustc, rustdoc, and clippy.
use std::{borrow::Cow, fs, path::Path};
use itertools::Itertools;
@@ -6,6 +7,8 @@ use stdx::format_to;
use test_utils::project_root;
use xshell::{cmd, Shell};
+const DESTINATION: &str = "crates/ide-db/src/generated/lints.rs";
+
/// This clones rustc repo, and so is not worth to keep up-to-date. We update
/// manually by un-ignoring the test from time to time.
#[test]
@@ -14,11 +17,21 @@ fn sourcegen_lint_completions() {
let sh = &Shell::new().unwrap();
let rust_repo = project_root().join("./target/rust");
- if !rust_repo.exists() {
+ if rust_repo.exists() {
+ cmd!(sh, "git -C {rust_repo} pull --rebase").run().unwrap();
+ } else {
cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust {rust_repo}")
.run()
.unwrap();
}
+ // need submodules for Cargo to parse the workspace correctly
+ cmd!(
+ sh,
+ "git -C {rust_repo} submodule update --init --recursive --depth=1 --
+ compiler library src/tools"
+ )
+ .run()
+ .unwrap();
let mut contents = String::from(
r"
@@ -27,17 +40,28 @@ pub struct Lint {
pub label: &'static str,
pub description: &'static str,
}
+
pub struct LintGroup {
pub lint: Lint,
pub children: &'static [&'static str],
}
+
",
);
generate_lint_descriptor(sh, &mut contents);
contents.push('\n');
- generate_feature_descriptor(&mut contents, &rust_repo.join("src/doc/unstable-book/src"));
+ let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string());
+ let unstable_book = project_root().join("./target/unstable-book-gen");
+ cmd!(
+ sh,
+ "{cargo} run --manifest-path {rust_repo}/src/tools/unstable-book-gen/Cargo.toml --
+ {rust_repo}/library {rust_repo}/compiler {rust_repo}/src {unstable_book}"
+ )
+ .run()
+ .unwrap();
+ generate_feature_descriptor(&mut contents, &unstable_book.join("src"));
contents.push('\n');
let lints_json = project_root().join("./target/clippy_lints.json");
@@ -51,41 +75,60 @@ pub struct LintGroup {
let contents = sourcegen::add_preamble("sourcegen_lints", sourcegen::reformat(contents));
- let destination = project_root().join("crates/ide_db/src/generated/lints.rs");
+ let destination = project_root().join(DESTINATION);
sourcegen::ensure_file_contents(destination.as_path(), &contents);
}
+/// Parses the output of `rustdoc -Whelp` and prints `Lint` and `LintGroup` constants into `buf`.
+///
+/// As of writing, the output of `rustc -Whelp` (not rustdoc) has the following format:
+///
+/// ```text
+/// Lint checks provided by rustc:
+///
+/// name default meaning
+/// ---- ------- -------
+///
+/// ...
+///
+/// Lint groups provided by rustc:
+///
+/// name sub-lints
+/// ---- ---------
+///
+/// ...
+/// ```
+///
+/// `rustdoc -Whelp` (and any other custom `rustc` driver) adds another two
+/// tables after the `rustc` ones, with a different title but the same format.
fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
- // FIXME: rustdoc currently requires an input file for -Whelp cc https://github.com/rust-lang/rust/pull/88831
- let file = project_root().join(file!());
- let stdout = cmd!(sh, "rustdoc -W help {file}").read().unwrap();
- let start_lints = stdout.find("---- ------- -------").unwrap();
- let start_lint_groups = stdout.find("---- ---------").unwrap();
- let start_lints_rustdoc =
- stdout.find("Lint checks provided by plugins loaded by this crate:").unwrap();
- let start_lint_groups_rustdoc =
- stdout.find("Lint groups provided by plugins loaded by this crate:").unwrap();
+ let stdout = cmd!(sh, "rustdoc -Whelp").read().unwrap();
+ let lints_pat = "---- ------- -------\n";
+ let lint_groups_pat = "---- ---------\n";
+ let lints = find_and_slice(&stdout, lints_pat);
+ let lint_groups = find_and_slice(lints, lint_groups_pat);
+ let lints_rustdoc = find_and_slice(lint_groups, lints_pat);
+ let lint_groups_rustdoc = find_and_slice(lints_rustdoc, lint_groups_pat);
buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
buf.push('\n');
- let lints = stdout[start_lints..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
+ let lints = lints.lines().take_while(|l| !l.is_empty()).map(|line| {
let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
(name.trim(), Cow::Borrowed(description.trim()), vec![])
});
- let lint_groups =
- stdout[start_lint_groups..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
- let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
- (
- name.trim(),
- format!("lint group for: {}", lints.trim()).into(),
- lints
- .split_ascii_whitespace()
- .map(|s| s.trim().trim_matches(',').replace('-', "_"))
- .collect(),
- )
- });
+ let lint_groups = lint_groups.lines().take_while(|l| !l.is_empty()).map(|line| {
+ let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+ (
+ name.trim(),
+ format!("lint group for: {}", lints.trim()).into(),
+ lints
+ .split_ascii_whitespace()
+ .map(|s| s.trim().trim_matches(',').replace('-', "_"))
+ .collect(),
+ )
+ });
let lints = lints
.chain(lint_groups)
@@ -94,7 +137,8 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
for (name, description, ..) in &lints {
push_lint_completion(buf, &name.replace('-', "_"), description);
}
- buf.push_str("];\n");
+ buf.push_str("];\n\n");
+
buf.push_str(r#"pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &["#);
for (name, description, children) in &lints {
if !children.is_empty() {
@@ -115,27 +159,23 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
buf.push_str(r#"pub const RUSTDOC_LINTS: &[Lint] = &["#);
buf.push('\n');
- let lints_rustdoc =
- stdout[start_lints_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(|line| {
- let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
- let (_default_level, description) =
- rest.trim().split_once(char::is_whitespace).unwrap();
- (name.trim(), Cow::Borrowed(description.trim()), vec![])
- });
+ let lints_rustdoc = lints_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
+ let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
+ let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
+ (name.trim(), Cow::Borrowed(description.trim()), vec![])
+ });
let lint_groups_rustdoc =
- stdout[start_lint_groups_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(
- |line| {
- let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
- (
- name.trim(),
- format!("lint group for: {}", lints.trim()).into(),
- lints
- .split_ascii_whitespace()
- .map(|s| s.trim().trim_matches(',').replace('-', "_"))
- .collect(),
- )
- },
- );
+ lint_groups_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
+ let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
+ (
+ name.trim(),
+ format!("lint group for: {}", lints.trim()).into(),
+ lints
+ .split_ascii_whitespace()
+ .map(|s| s.trim().trim_matches(',').replace('-', "_"))
+ .collect(),
+ )
+ });
let lints_rustdoc = lints_rustdoc
.chain(lint_groups_rustdoc)
@@ -145,7 +185,7 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
for (name, description, ..) in &lints_rustdoc {
push_lint_completion(buf, &name.replace('-', "_"), description)
}
- buf.push_str("];\n");
+ buf.push_str("];\n\n");
buf.push_str(r#"pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &["#);
for (name, description, children) in &lints_rustdoc {
@@ -157,14 +197,24 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
buf.push_str("];\n");
}
+#[track_caller]
+fn find_and_slice<'a>(i: &'a str, p: &str) -> &'a str {
+ let idx = i.find(p).unwrap();
+ &i[idx + p.len()..]
+}
+
+/// Parses the unstable book `src_dir` and prints a constant with the list of
+/// unstable features into `buf`.
+///
+/// It does this by looking for all `.md` files in the `language-features` and
+/// `library-features` directories, and using the file name as the feature
+/// name, and the file contents as the feature description.
fn generate_feature_descriptor(buf: &mut String, src_dir: &Path) {
let mut features = ["language-features", "library-features"]
.into_iter()
.flat_map(|it| sourcegen::list_files(&src_dir.join(it)))
- .filter(|path| {
- // Get all `.md ` files
- path.extension().unwrap_or_default().to_str().unwrap_or_default() == "md"
- })
+ // Get all `.md` files
+ .filter(|path| path.extension() == Some("md".as_ref()))
.map(|path| {
let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace('-', "_");
let doc = fs::read_to_string(path).unwrap();
@@ -196,7 +246,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
let mut clippy_lints: Vec<ClippyLint> = Vec::new();
let mut clippy_groups: std::collections::BTreeMap<String, Vec<String>> = Default::default();
- for line in file_content.lines().map(|line| line.trim()) {
+ for line in file_content.lines().map(str::trim) {
if let Some(line) = line.strip_prefix(r#""id": ""#) {
let clippy_lint = ClippyLint {
id: line.strip_suffix(r#"","#).expect("should be suffixed by comma").into(),
@@ -211,12 +261,19 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
.push(clippy_lints.last().unwrap().id.clone());
}
} else if let Some(line) = line.strip_prefix(r#""docs": ""#) {
- let prefix_to_strip = r#" ### What it does"#;
- let line = match line.strip_prefix(prefix_to_strip) {
- Some(line) => line,
+ let header = "### What it does";
+ let line = match line.find(header) {
+ Some(idx) => &line[idx + header.len()..],
None => {
- eprintln!("unexpected clippy prefix for {}", clippy_lints.last().unwrap().id);
- continue;
+ let id = &clippy_lints.last().unwrap().id;
+ // these just don't have the common header
+ let allowed = ["allow_attributes", "read_line_without_trim"];
+ if allowed.contains(&id.as_str()) {
+ line
+ } else {
+ eprintln!("\nunexpected clippy prefix for {id}, line={line:?}\n",);
+ continue;
+ }
}
};
// Only take the description, any more than this is a lot of additional data we would embed into the exe
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
index 14aa39401..f4055024c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -13,9 +13,9 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-either = "1.7.0"
-itertools = "0.10.5"
-serde_json = "1.0.86"
+either.workspace = true
+itertools.workspace = true
+serde_json.workspace = true
once_cell = "1.17.0"
# local deps
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
index 3b69640af..45fc6f8e6 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
@@ -1,7 +1,10 @@
//! Suggests shortening `Foo { field: field }` to `Foo { field }` in both
//! expressions and patterns.
-use ide_db::{base_db::FileId, source_change::SourceChange};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ source_change::SourceChange,
+};
use syntax::{ast, match_ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
@@ -49,7 +52,7 @@ fn check_expr_field_shorthand(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct initialization",
- field_range,
+ FileRange { file_id, range: field_range },
)
.with_fixes(Some(vec![fix(
"use_expr_field_shorthand",
@@ -93,7 +96,7 @@ fn check_pat_field_shorthand(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct pattern",
- field_range,
+ FileRange { file_id, range: field_range },
)
.with_fixes(Some(vec![fix(
"use_pat_field_shorthand",
@@ -166,7 +169,7 @@ fn main() {
check_diagnostics(
r#"
struct A { a: &'static str }
-fn f(a: A) { let A { a: hello } = a; }
+fn f(a: A) { let A { a: _hello } = a; }
"#,
);
check_diagnostics(
@@ -181,12 +184,14 @@ fn f(a: A) { let A { 0: 0 } = a; }
struct A { a: &'static str }
fn f(a: A) {
let A { a$0: a } = a;
+ _ = a;
}
"#,
r#"
struct A { a: &'static str }
fn f(a: A) {
let A { a } = a;
+ _ = a;
}
"#,
);
@@ -196,12 +201,14 @@ fn f(a: A) {
struct A { a: &'static str, b: &'static str }
fn f(a: A) {
let A { a$0: a, b } = a;
+ _ = (a, b);
}
"#,
r#"
struct A { a: &'static str, b: &'static str }
fn f(a: A) {
let A { a, b } = a;
+ _ = (a, b);
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
index 9eb763d3e..3b2e15a17 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -31,7 +31,7 @@ pub(crate) fn inactive_code(
let res = Diagnostic::new(
DiagnosticCode::Ra("inactive-code", Severity::WeakWarning),
message,
- ctx.sema.diagnostics_display_range(d.node.clone()).range,
+ ctx.sema.diagnostics_display_range(d.node.clone()),
)
.with_unused(true);
Some(res)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 235062bf5..0f12e814b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -113,6 +113,31 @@ fn some_fn() {
}
"#,
);
+
+ check_fix(
+ r#"
+static S: i32 = M::A;
+
+mod $0M {
+ pub const A: i32 = 10;
+}
+
+mod other {
+ use crate::M::A;
+}
+"#,
+ r#"
+static S: i32 = m::A;
+
+mod m {
+ pub const A: i32 = 10;
+}
+
+mod other {
+ use crate::m::A;
+}
+"#,
+ );
}
#[test]
@@ -175,10 +200,10 @@ fn NonSnakeCaseName() {}
fn incorrect_function_params() {
check_diagnostics(
r#"
-fn foo(SomeParam: u8) {}
+fn foo(SomeParam: u8) { _ = SomeParam; }
// ^^^^^^^^^ 💡 warn: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
-fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
+fn foo2(ok_param: &str, CAPS_PARAM: u8) { _ = (ok_param, CAPS_PARAM); }
// ^^^^^^^^^^ 💡 warn: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
"#,
);
@@ -188,6 +213,7 @@ fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
fn incorrect_variable_names() {
check_diagnostics(
r#"
+#[allow(unused)]
fn foo() {
let SOME_VALUE = 10;
// ^^^^^^^^^^ 💡 warn: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
@@ -294,6 +320,7 @@ impl someStruct {
// ^^^^^^^^ 💡 warn: Function `SomeFunc` should have snake_case name, e.g. `some_func`
let WHY_VAR_IS_CAPS = 10;
// ^^^^^^^^^^^^^^^ 💡 warn: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
+ _ = WHY_VAR_IS_CAPS;
}
}
"#,
@@ -306,6 +333,7 @@ impl someStruct {
r#"
enum Option { Some, None }
+#[allow(unused)]
fn main() {
match Option::None {
None => (),
@@ -322,6 +350,7 @@ fn main() {
r#"
enum Option { Some, None }
+#[allow(unused)]
fn main() {
match Option::None {
SOME_VAR @ None => (),
@@ -349,7 +378,9 @@ enum E {
}
mod F {
- fn CheckItWorksWithCrateAttr(BAD_NAME_HI: u8) {}
+ fn CheckItWorksWithCrateAttr(BAD_NAME_HI: u8) {
+ _ = BAD_NAME_HI;
+ }
}
"#,
);
@@ -395,7 +426,7 @@ fn qualify() {
#[test] // Issue #8809.
fn parenthesized_parameter() {
- check_diagnostics(r#"fn f((O): _) {}"#)
+ check_diagnostics(r#"fn f((O): _) { _ = O; }"#)
}
#[test]
@@ -472,7 +503,9 @@ mod CheckBadStyle {
mod F {
#![allow(non_snake_case)]
- fn CheckItWorksWithModAttr(BAD_NAME_HI: u8) {}
+ fn CheckItWorksWithModAttr(BAD_NAME_HI: u8) {
+ _ = BAD_NAME_HI;
+ }
}
#[allow(non_snake_case, non_camel_case_types)]
@@ -510,17 +543,20 @@ fn NonSnakeCaseName(some_var: u8) -> u8 {
#[deny(nonstandard_style)]
mod CheckNonstandardStyle {
+ //^^^^^^^^^^^^^^^^^^^^^ 💡 error: Module `CheckNonstandardStyle` should have snake_case name, e.g. `check_nonstandard_style`
fn HiImABadFnName() {}
//^^^^^^^^^^^^^^ 💡 error: Function `HiImABadFnName` should have snake_case name, e.g. `hi_im_abad_fn_name`
}
#[deny(warnings)]
mod CheckBadStyle {
+ //^^^^^^^^^^^^^ 💡 error: Module `CheckBadStyle` should have snake_case name, e.g. `check_bad_style`
struct fooo;
//^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo`
}
mod F {
+ //^ 💡 warn: Module `F` should have snake_case name, e.g. `f`
#![deny(non_snake_case)]
fn CheckItWorksWithModAttr() {}
//^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: Function `CheckItWorksWithModAttr` should have snake_case name, e.g. `check_it_works_with_mod_attr`
@@ -563,12 +599,12 @@ fn main() {
//^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
fn BAZ() {
//^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
- let INNER_INNER = 42;
- //^^^^^^^^^^^ 💡 warn: Variable `INNER_INNER` should have snake_case name, e.g. `inner_inner`
+ let _INNER_INNER = 42;
+ //^^^^^^^^^^^^ 💡 warn: Variable `_INNER_INNER` should have snake_case name, e.g. `_inner_inner`
}
- let INNER_LOCAL = 42;
- //^^^^^^^^^^^ 💡 warn: Variable `INNER_LOCAL` should have snake_case name, e.g. `inner_local`
+ let _INNER_LOCAL = 42;
+ //^^^^^^^^^^^^ 💡 warn: Variable `_INNER_LOCAL` should have snake_case name, e.g. `_inner_local`
}
}
"#,
@@ -641,4 +677,30 @@ enum E {
"#,
);
}
+
+ #[test]
+ fn module_name_inline() {
+ check_diagnostics(
+ r#"
+mod M {
+ //^ 💡 warn: Module `M` should have snake_case name, e.g. `m`
+ mod IncorrectCase {}
+ //^^^^^^^^^^^^^ 💡 warn: Module `IncorrectCase` should have snake_case name, e.g. `incorrect_case`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn module_name_decl() {
+ check_diagnostics(
+ r#"
+//- /Foo.rs
+
+//- /main.rs
+mod Foo;
+ //^^^ 💡 warn: Module `Foo` should have snake_case name, e.g. `foo`
+"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
index 1ec17952b..f68f5b44b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target(
ctx: &DiagnosticsContext<'_>,
d: &hir::InvalidDeriveTarget,
) -> Diagnostic {
- let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
+ let display_range = ctx.sema.diagnostics_display_range(d.node.clone());
Diagnostic::new(
DiagnosticCode::RustcHardError("E0774"),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index a337e2660..d330973aa 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -3,7 +3,7 @@
use hir::{PathResolution, Semantics};
use ide_db::{
- base_db::FileId,
+ base_db::{FileId, FileRange},
helpers::mod_path_to_ast,
imports::insert_use::{insert_use, ImportScope},
source_change::SourceChangeBuilder,
@@ -119,7 +119,7 @@ pub(crate) fn json_in_items(
Diagnostic::new(
DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning),
"JSON syntax is not valid as a Rust item",
- range,
+ FileRange { file_id, range },
)
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(file_id);
@@ -136,6 +136,7 @@ pub(crate) fn json_in_items(
it,
config.insert_use.prefix_kind,
config.prefer_no_std,
+ config.prefer_prelude,
) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
}
@@ -148,6 +149,7 @@ pub(crate) fn json_in_items(
it,
config.insert_use.prefix_kind,
config.prefer_no_std,
+ config.prefer_prelude,
) {
insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index 7ca0a0eab..099de4528 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -60,9 +60,6 @@ macro_rules! compile_error { () => {} }
#[test]
fn eager_macro_concat() {
- // FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic.
- // See: https://github.com/rust-lang/rust-analyzer/issues/10300
-
check_diagnostics(
r#"
//- /lib.rs crate:lib deps:core
@@ -80,7 +77,6 @@ macro_rules! m {
fn f() {
m!();
- //^^^^ error: unresolved macro $crate::private::concat
}
//- /core.rs crate:core
@@ -268,4 +264,24 @@ fn f() {
"#,
)
}
+
+ #[test]
+ fn include_does_not_break_diagnostics() {
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("inactive-code".to_string());
+ config.disabled.insert("unlinked-file".to_string());
+ check_diagnostics_with_config(
+ config,
+ r#"
+//- minicore: include
+//- /lib.rs crate:lib
+include!("include-me.rs");
+//- /include-me.rs
+/// long doc that pushes the diagnostic range beyond the first file's text length
+ #[err]
+//^^^^^^error: unresolved macro `err`
+mod prim_never {}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
index fc57dde69..6202d1585 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -7,7 +7,7 @@ pub(crate) fn malformed_derive(
ctx: &DiagnosticsContext<'_>,
d: &hir::MalformedDerive,
) -> Diagnostic {
- let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
+ let display_range = ctx.sema.diagnostics_display_range(d.node.clone());
Diagnostic::new(
DiagnosticCode::RustcHardError("E0777"),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index 8265e0b1c..829601802 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -1,8 +1,9 @@
use either::Either;
use hir::InFile;
+use ide_db::base_db::FileRange;
use syntax::{
ast::{self, HasArgList},
- AstNode, SyntaxNodePtr, TextRange,
+ AstNode, SyntaxNodePtr,
};
use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
@@ -23,12 +24,7 @@ pub(crate) fn mismatched_tuple_struct_pat_arg_count(
Diagnostic::new(
DiagnosticCode::RustcHardError("E0023"),
message,
- invalid_args_range(
- ctx,
- d.expr_or_pat.clone().map(|it| it.either(Into::into, Into::into)),
- d.expected,
- d.found,
- ),
+ invalid_args_range(ctx, d.expr_or_pat.clone().map(Into::into), d.expected, d.found),
)
}
@@ -53,7 +49,7 @@ fn invalid_args_range(
source: InFile<SyntaxNodePtr>,
expected: usize,
found: usize,
-) -> TextRange {
+) -> FileRange {
adjusted_display_range::<Either<ast::Expr, ast::TupleStructPat>>(ctx, source, &|expr| {
let (text_range, r_paren_token, expected_arg) = match expr {
Either::Left(ast::Expr::CallExpr(call)) => {
@@ -131,7 +127,7 @@ fn f() { zero(); }
fn simple_free_fn_one() {
check_diagnostics(
r#"
-fn one(arg: u8) {}
+fn one(_arg: u8) {}
fn f() { one(); }
//^^ error: expected 1 argument, found 0
"#,
@@ -139,7 +135,7 @@ fn f() { one(); }
check_diagnostics(
r#"
-fn one(arg: u8) {}
+fn one(_arg: u8) {}
fn f() { one(1); }
"#,
);
@@ -176,7 +172,7 @@ fn f() {
check_diagnostics(
r#"
struct S;
-impl S { fn method(&self, arg: u8) {} }
+impl S { fn method(&self, _arg: u8) {} }
fn f() {
S.method();
@@ -187,7 +183,7 @@ impl S { fn method(&self, arg: u8) {} }
check_diagnostics(
r#"
struct S;
-impl S { fn method(&self, arg: u8) {} }
+impl S { fn method(&self, _arg: u8) {} }
fn f() {
S::method(&S, 0);
@@ -335,8 +331,8 @@ struct S;
impl S {
fn method(#[cfg(NEVER)] self) {}
- fn method2(#[cfg(NEVER)] self, arg: u8) {}
- fn method3(self, #[cfg(NEVER)] arg: u8) {}
+ fn method2(#[cfg(NEVER)] self, _arg: u8) {}
+ fn method3(self, #[cfg(NEVER)] _arg: u8) {}
}
extern "C" {
@@ -365,8 +361,8 @@ fn main() {
r#"
#[rustc_legacy_const_generics(1, 3)]
fn mixed<const N1: &'static str, const N2: bool>(
- a: u8,
- b: i8,
+ _a: u8,
+ _b: i8,
) {}
fn f() {
@@ -376,8 +372,8 @@ fn f() {
#[rustc_legacy_const_generics(1, 3)]
fn b<const N1: u8, const N2: u8>(
- a: u8,
- b: u8,
+ _a: u8,
+ _b: u8,
) {}
fn g() {
@@ -403,7 +399,7 @@ fn f(
// ^^ error: this pattern has 0 fields, but the corresponding tuple struct has 2 fields
S(e, f, .., g, d): S
// ^^^^^^^^^ error: this pattern has 4 fields, but the corresponding tuple struct has 2 fields
-) {}
+) { _ = (a, b, c, d, e, f, g); }
"#,
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
index acc31cd11..cb38bc54d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -1,7 +1,7 @@
use either::Either;
use hir::{
db::{ExpandDatabase, HirDatabase},
- known, AssocItem, HirDisplay, InFile, Type,
+ known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type,
};
use ide_db::{
assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,
@@ -39,7 +39,7 @@ pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingField
d.field_list_parent_path
.clone()
.map(SyntaxNodePtr::from)
- .unwrap_or_else(|| d.field_list_parent.clone().either(|it| it.into(), |it| it.into())),
+ .unwrap_or_else(|| d.field_list_parent.clone().into()),
);
Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError("E0063"), message, ptr)
@@ -58,10 +58,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let root = ctx.sema.db.parse_or_expand(d.file);
- let current_module = match &d.field_list_parent {
- Either::Left(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
- Either::Right(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
- };
+ let current_module =
+ ctx.sema.scope(d.field_list_parent.to_node(&root).syntax()).map(|it| it.module());
let build_text_edit = |parent_syntax, new_syntax: &SyntaxNode, old_syntax| {
let edit = {
@@ -87,9 +85,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
)])
};
- match &d.field_list_parent {
- Either::Left(record_expr) => {
- let field_list_parent = record_expr.to_node(&root);
+ match &d.field_list_parent.to_node(&root) {
+ Either::Left(field_list_parent) => {
let missing_fields = ctx.sema.record_literal_missing_fields(&field_list_parent);
let mut locals = FxHashMap::default();
@@ -125,6 +122,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
ctx.sema.db,
item_for_path_search(ctx.sema.db, item_in_ns)?,
ctx.config.prefer_no_std,
+ ctx.config.prefer_prelude,
)?;
use_trivial_constructor(
@@ -152,8 +150,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
old_field_list.syntax(),
)
}
- Either::Right(record_pat) => {
- let field_list_parent = record_pat.to_node(&root);
+ Either::Right(field_list_parent) => {
let missing_fields = ctx.sema.record_pattern_missing_fields(&field_list_parent);
let old_field_list = field_list_parent.record_pat_field_list()?;
@@ -290,6 +287,7 @@ fn x(a: S) {
struct S { s: u32 }
fn x(a: S) {
let S { ref s } = a;
+ _ = s;
}
",
)
@@ -626,7 +624,7 @@ struct TestStruct { one: i32, two: i64 }
fn test_fn() {
let one = 1;
- let s = TestStruct{ one, two: 2 };
+ let _s = TestStruct{ one, two: 2 };
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 06b03d3d1..ef6a273ed 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -17,14 +17,32 @@ pub(crate) fn missing_match_arms(
#[cfg(test)]
mod tests {
- use crate::tests::check_diagnostics;
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config},
+ DiagnosticsConfig,
+ };
+ #[track_caller]
fn check_diagnostics_no_bails(ra_fixture: &str) {
cov_mark::check_count!(validate_match_bailed_out, 0);
crate::tests::check_diagnostics(ra_fixture)
}
#[test]
+ fn empty_body() {
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("syntax-error".to_string());
+ check_diagnostics_with_config(
+ config,
+ r#"
+fn main() {
+ match 0;
+}
+"#,
+ );
+ }
+
+ #[test]
fn empty_tuple() {
check_diagnostics_no_bails(
r#"
@@ -564,6 +582,7 @@ fn bang(never: !) {
r#"
enum Option<T> { Some(T), None }
+#[allow(unused)]
fn main() {
// `Never` is deliberately not defined so that it's an uninferred type.
match Option::<Never>::None {
@@ -719,7 +738,7 @@ fn main() {
r#"
struct S { a: char}
fn main(v: S) {
- match v { S{ a } => {} }
+ match v { S{ a } => { _ = a; } }
match v { S{ a: _x } => {} }
match v { S{ a: 'a' } => {} }
match v { S{..} => {} }
@@ -901,7 +920,7 @@ enum E{ A, B }
fn foo() {
match &E::A {
E::A => {}
- x => {}
+ _x => {}
}
}",
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 70b26009b..f93a35cf1 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -1,4 +1,5 @@
use hir::db::ExpandDatabase;
+use hir::HirFileIdExt;
use ide_db::{assists::Assist, source_change::SourceChange};
use syntax::{ast, SyntaxNode};
use syntax::{match_ast, AstNode};
@@ -100,9 +101,9 @@ mod tests {
r#"
fn main() {
let x = &5 as *const usize;
- unsafe { let y = *x; }
- let z = *x;
-} //^^💡 error: this operation is unsafe and requires an unsafe function or block
+ unsafe { let _y = *x; }
+ let _z = *x;
+} //^^💡 error: this operation is unsafe and requires an unsafe function or block
"#,
)
}
@@ -116,13 +117,13 @@ struct HasUnsafe;
impl HasUnsafe {
unsafe fn unsafe_fn(&self) {
let x = &5 as *const usize;
- let y = *x;
+ let _y = *x;
}
}
unsafe fn unsafe_fn() {
let x = &5 as *const usize;
- let y = *x;
+ let _y = *x;
}
fn main() {
@@ -152,10 +153,10 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
- let x = STATIC_MUT.a;
- //^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
+ let _x = STATIC_MUT.a;
+ //^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
unsafe {
- let x = STATIC_MUT.a;
+ let _x = STATIC_MUT.a;
}
}
"#,
@@ -187,13 +188,13 @@ fn main() {
r#"
fn main() {
let x = &5 as *const usize;
- let z = *x$0;
+ let _z = *x$0;
}
"#,
r#"
fn main() {
let x = &5 as *const usize;
- let z = unsafe { *x };
+ let _z = unsafe { *x };
}
"#,
);
@@ -231,7 +232,7 @@ struct S(usize);
impl S {
unsafe fn func(&self) {
let x = &self.0 as *const usize;
- let z = *x;
+ let _z = *x;
}
}
fn main() {
@@ -244,7 +245,7 @@ struct S(usize);
impl S {
unsafe fn func(&self) {
let x = &self.0 as *const usize;
- let z = *x;
+ let _z = *x;
}
}
fn main() {
@@ -267,7 +268,7 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
- let x = STATIC_MUT$0.a;
+ let _x = STATIC_MUT$0.a;
}
"#,
r#"
@@ -278,7 +279,7 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
- let x = unsafe { STATIC_MUT.a };
+ let _x = unsafe { STATIC_MUT.a };
}
"#,
)
@@ -382,16 +383,16 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x;
- x = STATIC_MUT$0;
+ let _x;
+ _x = STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x;
- x = unsafe { STATIC_MUT };
+ let _x;
+ _x = unsafe { STATIC_MUT };
}
"#,
)
@@ -405,14 +406,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = STATIC_MUT$0 + 1;
+ let _x = STATIC_MUT$0 + 1;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = unsafe { STATIC_MUT } + 1;
+ let _x = unsafe { STATIC_MUT } + 1;
}
"#,
)
@@ -425,14 +426,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = &STATIC_MUT$0;
+ let _x = &STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = unsafe { &STATIC_MUT };
+ let _x = unsafe { &STATIC_MUT };
}
"#,
)
@@ -445,14 +446,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = &&STATIC_MUT$0;
+ let _x = &&STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
- let x = unsafe { &&STATIC_MUT };
+ let _x = unsafe { &&STATIC_MUT };
}
"#,
)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 3aa4aa970..886aefeb5 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -29,6 +29,7 @@ fn main() {
let a = &X;
let b = *a;
//^ error: cannot move `X` out of reference
+ _ = b;
}
"#,
);
@@ -46,6 +47,7 @@ fn main() {
let b = a.0;
//^ error: cannot move `X` out of reference
let y = a.1;
+ _ = (b, y);
}
"#,
);
@@ -59,8 +61,8 @@ fn main() {
struct X;
fn main() {
static S: X = X;
- let s = S;
- //^ error: cannot move `X` out of reference
+ let _s = S;
+ //^^ error: cannot move `X` out of reference
}
"#,
);
@@ -165,7 +167,7 @@ enum X {
fn main() {
let x = &X::Bar;
- let c = || match *x {
+ let _c = || match *x {
X::Foo(t) => t,
_ => 5,
};
@@ -173,4 +175,19 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn regression_15787() {
+ check_diagnostics(
+ r#"
+//- minicore: coerce_unsized, slice, copy
+fn foo(mut slice: &[u32]) -> usize {
+ slice = match slice {
+ [0, rest @ ..] | rest => rest,
+ };
+ slice.len()
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index d056e5c85..187511149 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -324,6 +324,7 @@ fn main() {
let x_own = 2;
let ref mut x_ref = x_own;
//^^^^^^^^^^^^^ 💡 error: cannot mutate immutable variable `x_own`
+ _ = x_ref;
}
"#,
);
@@ -331,7 +332,7 @@ fn main() {
r#"
struct Foo;
impl Foo {
- fn method(&mut self, x: i32) {}
+ fn method(&mut self, _x: i32) {}
}
fn main() {
let x = Foo;
@@ -391,6 +392,7 @@ fn main() {
//^^^^^ 💡 warn: variable does not need to be mutable
x = 7;
//^^^^^ 💡 error: cannot mutate immutable variable `x`
+ _ = y;
}
}
}
@@ -404,12 +406,14 @@ fn main() {
// there would be no mutability error for locals in dead code. Rustc tries to
// not emit `unused_mut` in this case, but since it works without `mut`, and
// special casing it is not trivial, we emit it.
+
+ // Update: now MIR based `unused-variable` is taking over `unused-mut` for the same reason.
check_diagnostics(
r#"
fn main() {
return;
let mut x = 2;
- //^^^^^ 💡 warn: variable does not need to be mutable
+ //^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -419,7 +423,7 @@ fn main() {
fn main() {
loop {}
let mut x = 2;
- //^^^^^ 💡 warn: variable does not need to be mutable
+ //^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -440,7 +444,7 @@ fn main(b: bool) {
g();
}
let mut x = 2;
- //^^^^^ 💡 warn: variable does not need to be mutable
+ //^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -454,7 +458,7 @@ fn main(b: bool) {
return;
}
let mut x = 2;
- //^^^^^ 💡 warn: variable does not need to be mutable
+ //^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -536,6 +540,7 @@ fn main() {
(k @ 5, ref mut t) if { continue; } => {
//^^^^^^^^^ 💡 error: cannot mutate immutable variable `z`
*t = 5;
+ _ = k;
}
_ => {
let y = (1, 2);
@@ -588,6 +593,7 @@ fn main() {
b = 1;
c = (2, 3);
d = 3;
+ _ = (c, b, d);
}
}
"#,
@@ -600,6 +606,7 @@ fn main() {
r#"
fn f(mut x: i32) {
//^^^^^ 💡 warn: variable does not need to be mutable
+ f(x + 2);
}
"#,
);
@@ -615,8 +622,11 @@ fn f(x: i32) {
r#"
fn f((x, y): (i32, i32)) {
let t = [0; 2];
- x = 5;
- //^^^^^ 💡 error: cannot mutate immutable variable `x`
+ x = 5;
+ //^^^^^ 💡 error: cannot mutate immutable variable `x`
+ _ = x;
+ _ = y;
+ _ = t;
}
"#,
);
@@ -645,6 +655,7 @@ fn f(x: [(i32, u8); 10]) {
//^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
+ _ = b;
}
}
"#,
@@ -666,6 +677,7 @@ fn f(x: [(i32, u8); 10]) {
//^^^^^ 💡 error: cannot mutate immutable variable `a`
c = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `c`
+ _ = (b, d);
}
}
}
@@ -696,18 +708,18 @@ fn f() {
fn overloaded_index() {
check_diagnostics(
r#"
-//- minicore: index
+//- minicore: index, copy
use core::ops::{Index, IndexMut};
struct Foo;
impl Index<usize> for Foo {
type Output = (i32, u8);
- fn index(&self, index: usize) -> &(i32, u8) {
+ fn index(&self, _index: usize) -> &(i32, u8) {
&(5, 2)
}
}
impl IndexMut<usize> for Foo {
- fn index_mut(&mut self, index: usize) -> &mut (i32, u8) {
+ fn index_mut(&mut self, _index: usize) -> &mut (i32, u8) {
&mut (5, 2)
}
}
@@ -715,26 +727,32 @@ fn f() {
let mut x = Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &x[2];
+ _ = (x, y);
let x = Foo;
let y = &mut x[2];
//^💡 error: cannot mutate immutable variable `x`
+ _ = (x, y);
let mut x = &mut Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y: &mut (i32, u8) = &mut x[2];
+ _ = (x, y);
let x = Foo;
let ref mut y = x[7];
//^ 💡 error: cannot mutate immutable variable `x`
+ _ = (x, y);
let (ref mut y, _) = x[3];
//^ 💡 error: cannot mutate immutable variable `x`
+ _ = y;
match x[10] {
//^ 💡 error: cannot mutate immutable variable `x`
- (ref y, _) => (),
- (_, ref mut y) => (),
+ (ref y, 5) => _ = y,
+ (_, ref mut y) => _ = y,
}
let mut x = Foo;
let mut i = 5;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &mut x[i];
+ _ = y;
}
"#,
);
@@ -744,7 +762,7 @@ fn f() {
fn overloaded_deref() {
check_diagnostics(
r#"
-//- minicore: deref_mut
+//- minicore: deref_mut, copy
use core::ops::{Deref, DerefMut};
struct Foo;
@@ -763,21 +781,27 @@ fn f() {
let mut x = Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &*x;
+ _ = (x, y);
let x = Foo;
let y = &mut *x;
//^^ 💡 error: cannot mutate immutable variable `x`
+ _ = (x, y);
let x = Foo;
+ //^ warn: unused variable
let x = Foo;
let y: &mut (i32, u8) = &mut x;
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
+ _ = (x, y);
let ref mut y = *x;
//^^ 💡 error: cannot mutate immutable variable `x`
+ _ = y;
let (ref mut y, _) = *x;
//^^ 💡 error: cannot mutate immutable variable `x`
+ _ = y;
match *x {
//^^ 💡 error: cannot mutate immutable variable `x`
- (ref y, _) => (),
- (_, ref mut y) => (),
+ (ref y, 5) => _ = y,
+ (_, ref mut y) => _ = y,
}
}
"#,
@@ -866,6 +890,7 @@ pub fn test() {
data: 0
}
);
+ _ = tree;
}
"#,
);
@@ -925,6 +950,7 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
let x = X;
let closure4 = || { x.mutate(); };
//^ 💡 error: cannot mutate immutable variable `x`
+ _ = (closure2, closure3, closure4);
}
"#,
);
@@ -941,7 +967,9 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
z = 3;
let mut k = z;
//^^^^^ 💡 warn: variable does not need to be mutable
+ _ = k;
};
+ _ = (x, closure);
}
"#,
);
@@ -958,6 +986,7 @@ fn f() {
}
}
};
+ _ = closure;
}
"#,
);
@@ -972,7 +1001,8 @@ fn f() {
let mut x = X;
let c2 = || { x = X; x };
let mut x = X;
- let c2 = move || { x = X; };
+ let c3 = move || { x = X; };
+ _ = (c1, c2, c3);
}
"#,
);
@@ -1023,7 +1053,7 @@ fn x(t: &[u8]) {
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
-
+ _ = b;
}
_ => {}
}
@@ -1079,6 +1109,7 @@ fn f() {
let x = Box::new(5);
let closure = || *x = 2;
//^ 💡 error: cannot mutate immutable variable `x`
+ _ = closure;
}
"#,
);
@@ -1156,6 +1187,7 @@ macro_rules! mac {
fn main2() {
let mut x = mac![];
//^^^^^ 💡 warn: variable does not need to be mutable
+ _ = x;
}
"#,
);
@@ -1196,4 +1228,20 @@ fn foo(mut foo: Foo) {
"#,
);
}
+
+ #[test]
+ fn regression_15670() {
+ check_diagnostics(
+ r#"
+//- minicore: fn
+
+pub struct A {}
+pub unsafe fn foo(a: *mut A) {
+ let mut b = || -> *mut A { &mut *a };
+ //^^^^^ 💡 warn: variable does not need to be mutable
+ let _ = b();
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
index 290c16c9d..0abcbffe7 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -1,5 +1,5 @@
use either::Either;
-use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics};
+use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics};
use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
use syntax::{
ast::{self, edit::IndentLevel, make},
@@ -13,7 +13,7 @@ use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
//
// This diagnostic is triggered if created structure does not have field provided in record.
pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
- let node = d.field.clone().map(|it| it.either(Into::into, Into::into));
+ let node = d.field.clone().map(Into::into);
if d.private {
// FIXME: quickfix to add required visibility
Diagnostic::new_with_syntax_node_ptr(
@@ -35,15 +35,13 @@ pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField)
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
// FIXME: quickfix for pattern
- match &d.field.value {
- Either::Left(ptr) => {
- let root = ctx.sema.db.parse_or_expand(d.field.file_id);
- missing_record_expr_field_fixes(
- &ctx.sema,
- d.field.file_id.original_file(ctx.sema.db),
- &ptr.to_node(&root),
- )
- }
+ let root = ctx.sema.db.parse_or_expand(d.field.file_id);
+ match &d.field.value.to_node(&root) {
+ Either::Left(node) => missing_record_expr_field_fixes(
+ &ctx.sema,
+ d.field.file_id.original_file(ctx.sema.db),
+ node,
+ ),
_ => None,
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
index c44d28e77..a828b8b4f 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
@@ -1,5 +1,3 @@
-use either::Either;
-
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: private-assoc-item
@@ -28,13 +26,7 @@ pub(crate) fn private_assoc_item(
},
name,
),
- d.expr_or_pat.clone().map(|it| match it {
- Either::Left(it) => it.into(),
- Either::Right(it) => match it {
- Either::Left(it) => it.into(),
- Either::Right(it) => it.into(),
- },
- }),
+ d.expr_or_pat.clone().map(Into::into),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index 083ef3e8d..258ac6cd8 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -1,4 +1,4 @@
-use hir::{db::ExpandDatabase, InFile};
+use hir::{db::ExpandDatabase, HirFileIdExt, InFile};
use ide_db::source_change::SourceChange;
use syntax::{
ast::{self, HasArgList},
@@ -74,8 +74,8 @@ mod tests {
r#"
//- minicore: iterators
fn foo() {
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
-} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
"#,
);
}
@@ -117,7 +117,7 @@ fn foo() {
fn foo() {
let mut m = core::iter::repeat(())
.filter_map(|()| Some(92));
- let n = m.next();
+ let _n = m.next();
}
"#,
);
@@ -148,22 +148,22 @@ fn foo() {
fn foo() {
#[allow(clippy::filter_map_next)]
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
}
#[deny(clippy::filter_map_next)]
fn foo() {
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
-} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..)
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..)
fn foo() {
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
-} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
#[warn(clippy::filter_map_next)]
fn foo() {
- let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
-} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..)
+ let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..)
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
new file mode 100644
index 000000000..251a64529
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
@@ -0,0 +1,129 @@
+use hir::InFile;
+use syntax::ast;
+
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
+
+// Diagnostic: trait-impl-incorrect-safety
+//
+// Diagnoses incorrect safety annotations of trait impls.
+pub(crate) fn trait_impl_incorrect_safety(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TraitImplIncorrectSafety,
+) -> Diagnostic {
+ Diagnostic::new(
+ DiagnosticCode::Ra("trait-impl-incorrect-safety", Severity::Error),
+ if d.should_be_safe {
+ "unsafe impl for safe trait"
+ } else {
+ "impl for unsafe trait needs to be unsafe"
+ },
+ adjusted_display_range::<ast::Impl>(
+ ctx,
+ InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() },
+ &|impl_| {
+ if d.should_be_safe {
+ Some(match (impl_.unsafe_token(), impl_.impl_token()) {
+ (None, None) => return None,
+ (None, Some(t)) | (Some(t), None) => t.text_range(),
+ (Some(t1), Some(t2)) => t1.text_range().cover(t2.text_range()),
+ })
+ } else {
+ impl_.impl_token().map(|t| t.text_range())
+ }
+ },
+ ),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn simple() {
+ check_diagnostics(
+ r#"
+trait Safe {}
+unsafe trait Unsafe {}
+
+ impl Safe for () {}
+
+ impl Unsafe for () {}
+//^^^^ error: impl for unsafe trait needs to be unsafe
+
+ unsafe impl Safe for () {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+ unsafe impl Unsafe for () {}
+"#,
+ );
+ }
+
+ #[test]
+ fn drop_may_dangle() {
+ check_diagnostics(
+ r#"
+#[lang = "drop"]
+trait Drop {}
+struct S<T>;
+struct L<'l>;
+
+ impl<T> Drop for S<T> {}
+
+ impl<#[may_dangle] T> Drop for S<T> {}
+//^^^^ error: impl for unsafe trait needs to be unsafe
+
+ unsafe impl<T> Drop for S<T> {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+ unsafe impl<#[may_dangle] T> Drop for S<T> {}
+
+ impl<'l> Drop for L<'l> {}
+
+ impl<#[may_dangle] 'l> Drop for L<'l> {}
+//^^^^ error: impl for unsafe trait needs to be unsafe
+
+ unsafe impl<'l> Drop for L<'l> {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+ unsafe impl<#[may_dangle] 'l> Drop for L<'l> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn negative() {
+ check_diagnostics(
+ r#"
+trait Trait {}
+
+ impl !Trait for () {}
+
+ unsafe impl !Trait for () {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+unsafe trait UnsafeTrait {}
+
+ impl !UnsafeTrait for () {}
+
+ unsafe impl !UnsafeTrait for () {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+
+"#,
+ );
+ }
+
+ #[test]
+ fn inherent() {
+ check_diagnostics(
+ r#"
+struct S;
+
+ impl S {}
+
+ unsafe impl S {}
+//^^^^^^^^^^^ error: unsafe impl for safe trait
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
new file mode 100644
index 000000000..56188cddf
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
@@ -0,0 +1,129 @@
+use hir::InFile;
+use itertools::Itertools;
+use syntax::{ast, AstNode};
+
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: trait-impl-missing-assoc_item
+//
+// Diagnoses missing trait items in a trait impl.
+pub(crate) fn trait_impl_missing_assoc_item(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TraitImplMissingAssocItems,
+) -> Diagnostic {
+ let missing = d.missing.iter().format_with(", ", |(name, item), f| {
+ f(&match *item {
+ hir::AssocItem::Function(_) => "`fn ",
+ hir::AssocItem::Const(_) => "`const ",
+ hir::AssocItem::TypeAlias(_) => "`type ",
+ })?;
+ f(&name.display(ctx.sema.db))?;
+ f(&"`")
+ });
+ Diagnostic::new(
+ DiagnosticCode::RustcHardError("E0046"),
+ format!("not all trait items implemented, missing: {missing}"),
+ adjusted_display_range::<ast::Impl>(
+ ctx,
+ InFile { file_id: d.file_id, value: d.impl_.syntax_node_ptr() },
+ &|impl_| impl_.trait_().map(|t| t.syntax().text_range()),
+ ),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn trait_with_default_value() {
+ check_diagnostics(
+ r#"
+trait Marker {
+ const FLAG: bool = false;
+}
+struct Foo;
+impl Marker for Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn simple() {
+ check_diagnostics(
+ r#"
+trait Trait {
+ const C: ();
+ type T;
+ fn f();
+}
+
+impl Trait for () {
+ const C: () = ();
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`, `type T`, `fn f`
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn default() {
+ check_diagnostics(
+ r#"
+trait Trait {
+ const C: ();
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ const C: () = ();
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`
+ type T = ();
+ fn f() {}
+}
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`
+ type T = ();
+ }
+
+impl Trait for () {
+ //^^^^^ error: not all trait items implemented, missing: `const C`
+}
+
+"#,
+ );
+ }
+
+ #[test]
+ fn negative_impl() {
+ check_diagnostics(
+ r#"
+trait Trait {
+ fn item();
+}
+
+// Negative impls don't require any items (in fact, they forbid providing any)
+impl !Trait for () {}
+"#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
new file mode 100644
index 000000000..159d87d26
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
@@ -0,0 +1,106 @@
+use hir::InFile;
+
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: trait-impl-orphan
+//
+// Only traits defined in the current crate can be implemented for arbitrary types
+pub(crate) fn trait_impl_orphan(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TraitImplOrphan,
+) -> Diagnostic {
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0117"),
+ format!("only traits defined in the current crate can be implemented for arbitrary types"),
+ InFile::new(d.file_id, d.impl_.clone().into()),
+ )
+ // Not yet checked for false positives
+ .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn simple() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Foo {}
+//- /bar.rs crate:bar
+pub struct Bar;
+//- /main.rs crate:main deps:foo,bar
+struct LocalType;
+trait LocalTrait {}
+ impl foo::Foo for bar::Bar {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+impl foo::Foo for LocalType {}
+impl LocalTrait for bar::Bar {}
+"#,
+ );
+ }
+
+ #[test]
+ fn generics() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Foo<T> {}
+//- /bar.rs crate:bar
+pub struct Bar<T>(T);
+//- /main.rs crate:main deps:foo,bar
+struct LocalType<T>;
+trait LocalTrait<T> {}
+ impl<T> foo::Foo<T> for bar::Bar<T> {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+
+ impl<T> foo::Foo<T> for bar::Bar<LocalType<T>> {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+
+ impl<T> foo::Foo<LocalType<T>> for bar::Bar<T> {}
+
+ impl<T> foo::Foo<bar::Bar<LocalType<T>>> for bar::Bar<LocalType<T>> {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+"#,
+ );
+ }
+
+ #[test]
+ fn fundamental() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Foo<T> {}
+//- /bar.rs crate:bar
+pub struct Bar<T>(T);
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T>(T);
+//- /main.rs crate:main deps:foo,bar
+struct LocalType;
+ impl<T> foo::Foo<T> for bar::Box<T> {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: only traits defined in the current crate can be implemented for arbitrary types
+ impl<T> foo::Foo<T> for &LocalType {}
+ impl<T> foo::Foo<T> for bar::Box<LocalType> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn dyn_object() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Foo<T> {}
+//- /bar.rs crate:bar
+pub struct Bar;
+//- /main.rs crate:main deps:foo,bar
+trait LocalTrait {}
+impl<T> foo::Foo<T> for dyn LocalTrait {}
+impl<T> foo::Foo<dyn LocalTrait> for Bar {}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
new file mode 100644
index 000000000..820014391
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -0,0 +1,79 @@
+use hir::{Const, Function, HasSource, TypeAlias};
+use ide_db::base_db::FileRange;
+
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: trait-impl-redundant-assoc_item
+//
+// Diagnoses redundant trait items in a trait impl.
+pub(crate) fn trait_impl_redundant_assoc_item(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::TraitImplRedundantAssocItems,
+) -> Diagnostic {
+ let name = d.assoc_item.0.clone();
+ let assoc_item = d.assoc_item.1;
+ let db = ctx.sema.db;
+
+ let default_range = d.impl_.syntax_node_ptr().text_range();
+ let trait_name = d.trait_.name(db).to_smol_str();
+
+ let (redundant_item_name, diagnostic_range) = match assoc_item {
+ hir::AssocItem::Function(id) => (
+ format!("`fn {}`", name.display(db)),
+ Function::from(id)
+ .source(db)
+ .map(|it| it.syntax().value.text_range())
+ .unwrap_or(default_range),
+ ),
+ hir::AssocItem::Const(id) => (
+ format!("`const {}`", name.display(db)),
+ Const::from(id)
+ .source(db)
+ .map(|it| it.syntax().value.text_range())
+ .unwrap_or(default_range),
+ ),
+ hir::AssocItem::TypeAlias(id) => (
+ format!("`type {}`", name.display(db)),
+ TypeAlias::from(id)
+ .source(db)
+ .map(|it| it.syntax().value.text_range())
+ .unwrap_or(default_range),
+ ),
+ };
+
+ Diagnostic::new(
+ DiagnosticCode::RustcHardError("E0407"),
+ format!("{redundant_item_name} is not a member of trait `{trait_name}`"),
+ FileRange { file_id: d.file_id.file_id().unwrap(), range: diagnostic_range },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn trait_with_default_value() {
+ check_diagnostics(
+ r#"
+trait Marker {
+ const FLAG: bool = false;
+ fn boo();
+ fn foo () {}
+}
+struct Foo;
+impl Marker for Foo {
+ type T = i32;
+ //^^^^^^^^^^^^^ error: `type T` is not a member of trait `Marker`
+
+ const FLAG: bool = true;
+
+ fn bar() {}
+ //^^^^^^^^^^^ error: `fn bar` is not a member of trait `Marker`
+
+ fn boo() {}
+}
+ "#,
+ )
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 15bd28c00..70beb9468 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1,5 +1,4 @@
-use either::Either;
-use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type};
+use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type};
use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
use syntax::{
ast::{self, BlockExpr, ExprStmt},
@@ -14,9 +13,11 @@ use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, Dia
// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
- let display_range = match &d.expr_or_pat {
- Either::Left(expr) => {
- adjusted_display_range::<ast::Expr>(ctx, expr.clone().map(|it| it.into()), &|expr| {
+ let display_range = match &d.expr_or_pat.value {
+ expr if ast::Expr::can_cast(expr.kind()) => adjusted_display_range::<ast::Expr>(
+ ctx,
+ InFile { file_id: d.expr_or_pat.file_id, value: expr.syntax_node_ptr() },
+ &|expr| {
let salient_token_range = match expr {
ast::Expr::IfExpr(it) => it.if_token()?.text_range(),
ast::Expr::LoopExpr(it) => it.loop_token()?.text_range(),
@@ -32,11 +33,12 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
cov_mark::hit!(type_mismatch_range_adjustment);
Some(salient_token_range)
- })
- }
- Either::Right(pat) => {
- ctx.sema.diagnostics_display_range(pat.clone().map(|it| it.into())).range
- }
+ },
+ ),
+ pat => ctx.sema.diagnostics_display_range(InFile {
+ file_id: d.expr_or_pat.file_id,
+ value: pat.syntax_node_ptr(),
+ }),
};
let mut diag = Diagnostic::new(
DiagnosticCode::RustcHardError("E0308"),
@@ -57,14 +59,12 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
- match &d.expr_or_pat {
- Either::Left(expr_ptr) => {
- add_reference(ctx, d, expr_ptr, &mut fixes);
- add_missing_ok_or_some(ctx, d, expr_ptr, &mut fixes);
- remove_semicolon(ctx, d, expr_ptr, &mut fixes);
- str_ref_to_owned(ctx, d, expr_ptr, &mut fixes);
- }
- Either::Right(_pat_ptr) => {}
+ if let Some(expr_ptr) = d.expr_or_pat.value.clone().cast::<ast::Expr>() {
+ let expr_ptr = &InFile { file_id: d.expr_or_pat.file_id, value: expr_ptr.clone() };
+ add_reference(ctx, d, expr_ptr, &mut fixes);
+ add_missing_ok_or_some(ctx, d, expr_ptr, &mut fixes);
+ remove_semicolon(ctx, d, expr_ptr, &mut fixes);
+ str_ref_to_owned(ctx, d, expr_ptr, &mut fixes);
}
if fixes.is_empty() {
@@ -80,7 +80,7 @@ fn add_reference(
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
- let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into())).range;
+ let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into()));
let (_, mutability) = d.expected.as_reference()?;
let actual_with_ref = Type::reference(&d.actual, mutability);
@@ -90,10 +90,9 @@ fn add_reference(
let ampersands = format!("&{}", mutability.as_keyword_for_ref());
- let edit = TextEdit::insert(range.start(), ampersands);
- let source_change =
- SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
- acc.push(fix("add_reference_here", "Add reference here", source_change, range));
+ let edit = TextEdit::insert(range.range.start(), ampersands);
+ let source_change = SourceChange::from_text_edit(range.file_id, edit);
+ acc.push(fix("add_reference_here", "Add reference here", source_change, range.range));
Some(())
}
@@ -205,7 +204,7 @@ fn main() {
test(123);
//^^^ 💡 error: expected &i32, found i32
}
-fn test(arg: &i32) {}
+fn test(_arg: &i32) {}
"#,
);
}
@@ -217,13 +216,13 @@ fn test(arg: &i32) {}
fn main() {
test(123$0);
}
-fn test(arg: &i32) {}
+fn test(_arg: &i32) {}
"#,
r#"
fn main() {
test(&123);
}
-fn test(arg: &i32) {}
+fn test(_arg: &i32) {}
"#,
);
}
@@ -235,13 +234,13 @@ fn test(arg: &i32) {}
fn main() {
test($0123);
}
-fn test(arg: &mut i32) {}
+fn test(_arg: &mut i32) {}
"#,
r#"
fn main() {
test(&mut 123);
}
-fn test(arg: &mut i32) {}
+fn test(_arg: &mut i32) {}
"#,
);
}
@@ -254,13 +253,13 @@ fn test(arg: &mut i32) {}
fn main() {
test($0[1, 2, 3]);
}
-fn test(arg: &[i32]) {}
+fn test(_arg: &[i32]) {}
"#,
r#"
fn main() {
test(&[1, 2, 3]);
}
-fn test(arg: &[i32]) {}
+fn test(_arg: &[i32]) {}
"#,
);
}
@@ -274,24 +273,26 @@ struct Foo;
struct Bar;
impl core::ops::Deref for Foo {
type Target = Bar;
+ fn deref(&self) -> &Self::Target { loop {} }
}
fn main() {
test($0Foo);
}
-fn test(arg: &Bar) {}
+fn test(_arg: &Bar) {}
"#,
r#"
struct Foo;
struct Bar;
impl core::ops::Deref for Foo {
type Target = Bar;
+ fn deref(&self) -> &Self::Target { loop {} }
}
fn main() {
test(&Foo);
}
-fn test(arg: &Bar) {}
+fn test(_arg: &Bar) {}
"#,
);
}
@@ -305,7 +306,7 @@ fn main() {
}
struct Test;
impl Test {
- fn call_by_ref(&self, arg: &i32) {}
+ fn call_by_ref(&self, _arg: &i32) {}
}
"#,
r#"
@@ -314,7 +315,7 @@ fn main() {
}
struct Test;
impl Test {
- fn call_by_ref(&self, arg: &i32) {}
+ fn call_by_ref(&self, _arg: &i32) {}
}
"#,
);
@@ -345,7 +346,7 @@ macro_rules! thousand {
1000_u64
};
}
-fn test(foo: &u64) {}
+fn test(_foo: &u64) {}
fn main() {
test($0thousand!());
}
@@ -356,7 +357,7 @@ macro_rules! thousand {
1000_u64
};
}
-fn test(foo: &u64) {}
+fn test(_foo: &u64) {}
fn main() {
test(&thousand!());
}
@@ -369,12 +370,12 @@ fn main() {
check_fix(
r#"
fn main() {
- let test: &mut i32 = $0123;
+ let _test: &mut i32 = $0123;
}
"#,
r#"
fn main() {
- let test: &mut i32 = &mut 123;
+ let _test: &mut i32 = &mut 123;
}
"#,
);
@@ -411,7 +412,7 @@ fn div(x: i32, y: i32) -> Option<i32> {
fn f<const N: u64>() -> Rate<N> { // FIXME: add some error
loop {}
}
- fn run(t: Rate<5>) {
+ fn run(_t: Rate<5>) {
}
fn main() {
run(f()) // FIXME: remove this error
@@ -426,7 +427,7 @@ fn div(x: i32, y: i32) -> Option<i32> {
check_diagnostics(
r#"
pub struct Rate<T, const NOM: u32, const DENOM: u32>(T);
- fn run(t: Rate<u32, 1, 1>) {
+ fn run(_t: Rate<u32, 1, 1>) {
}
fn main() {
run(Rate::<_, _, _>(5));
@@ -650,7 +651,7 @@ fn h() {
r#"
struct X<T>(T);
-fn foo(x: X<Unknown>) {}
+fn foo(_x: X<Unknown>) {}
fn test1() {
// Unknown might be `i32`, so we should not emit type mismatch here.
foo(X(42));
@@ -736,4 +737,19 @@ fn g() { return; }
"#,
);
}
+
+ #[test]
+ fn smoke_test_inner_items() {
+ check_diagnostics(
+ r#"
+fn f() {
+ fn inner() -> i32 {
+ return;
+ // ^^^^^^ error: expected i32, found ()
+ 0
+ }
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
index 4af672271..a740e332b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -26,14 +26,14 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
)
};
- Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range.range)
+ Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range)
.with_fixes(fixes)
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.expr.file_id);
- let original_range =
+ let (original_range, _) =
d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?;
let mut assists = vec![];
@@ -142,8 +142,8 @@ fn t<T>() -> T { loop {} }
check_diagnostics(
r#"
fn main() {
- let x = [(); _];
- let y: [(); 10] = [(); _];
+ let _x = [(); _];
+ let _y: [(); 10] = [(); _];
_ = 0;
(_,) = (1,);
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index e04f27c27..becc24ab2 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -4,7 +4,7 @@ use std::iter;
use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
use ide_db::{
- base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt},
+ base_db::{FileId, FileLoader, FileRange, SourceDatabase, SourceDatabaseExt},
source_change::SourceChange,
RootDatabase,
};
@@ -46,8 +46,12 @@ pub(crate) fn unlinked_file(
.unwrap_or(range);
acc.push(
- Diagnostic::new(DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), message, range)
- .with_fixes(fixes),
+ Diagnostic::new(
+ DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning),
+ message,
+ FileRange { file_id, range },
+ )
+ .with_fixes(fixes),
);
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 0758706e4..321459412 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, AstPtr};
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-field
//
@@ -22,15 +22,24 @@ pub(crate) fn unresolved_field(
} else {
""
};
- Diagnostic::new_with_syntax_node_ptr(
- ctx,
+ Diagnostic::new(
DiagnosticCode::RustcHardError("E0559"),
format!(
"no field `{}` on type `{}`{method_suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- d.expr.clone().map(|it| it.into()),
+ adjusted_display_range_new(ctx, d.expr, &|expr| {
+ Some(
+ match expr {
+ ast::Expr::MethodCallExpr(it) => it.name_ref(),
+ ast::Expr::FieldExpr(it) => it.name_ref(),
+ _ => None,
+ }?
+ .syntax()
+ .text_range(),
+ )
+ }),
)
.with_fixes(fixes(ctx, d))
.experimental()
@@ -79,7 +88,7 @@ mod tests {
r#"
fn main() {
().foo;
- // ^^^^^^ error: no field `foo` on type `()`
+ // ^^^ error: no field `foo` on type `()`
}
"#,
);
@@ -95,7 +104,7 @@ impl Foo {
}
fn foo() {
Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+ // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
}
"#,
);
@@ -112,7 +121,7 @@ trait Bar {
impl Bar for Foo {}
fn foo() {
Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+ // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
}
"#,
);
@@ -131,7 +140,7 @@ impl Bar for Foo {
}
fn foo() {
Foo.bar;
- // ^^^^^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
+ // ^^^ 💡 error: no field `bar` on type `Foo`, but a method with a similar name exists
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index 33e7c2e37..c8ff54cba 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -70,4 +70,16 @@ self::m!(); self::m2!();
"#,
);
}
+
+ #[test]
+ fn regression_panic_with_inner_attribute_in_presence_of_unresolved_crate() {
+ check_diagnostics(
+ r#"
+ mod _test_inner {
+ #![empty_attr]
+ //^^^^^^^^^^^^^^ error: unresolved macro `empty_attr`
+ }
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index ae9f6744c..464b0a710 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, TextRange};
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-method
//
@@ -22,15 +22,24 @@ pub(crate) fn unresolved_method(
} else {
""
};
- Diagnostic::new_with_syntax_node_ptr(
- ctx,
+ Diagnostic::new(
DiagnosticCode::RustcHardError("E0599"),
format!(
"no method `{}` on type `{}`{field_suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- d.expr.clone().map(|it| it.into()),
+ adjusted_display_range_new(ctx, d.expr, &|expr| {
+ Some(
+ match expr {
+ ast::Expr::MethodCallExpr(it) => it.name_ref(),
+ ast::Expr::FieldExpr(it) => it.name_ref(),
+ _ => None,
+ }?
+ .syntax()
+ .text_range(),
+ )
+ }),
)
.with_fixes(fixes(ctx, d))
.experimental()
@@ -92,7 +101,41 @@ mod tests {
r#"
fn main() {
().foo();
- // ^^^^^^^^ error: no method `foo` on type `()`
+ // ^^^ error: no method `foo` on type `()`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn smoke_test_in_macro_def_site() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ ($rcv:expr) => {
+ $rcv.foo()
+ }
+}
+fn main() {
+ m!(());
+ // ^^^^^^ error: no method `foo` on type `()`
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn smoke_test_in_macro_call_site() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ ($ident:ident) => {
+ ().$ident()
+ }
+}
+fn main() {
+ m!(foo);
+ // ^^^ error: no method `foo` on type `()`
}
"#,
);
@@ -105,7 +148,7 @@ fn main() {
struct Foo { bar: i32 }
fn foo() {
Foo { bar: i32 }.bar();
- // ^^^^^^^^^^^^^^^^^^^^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists
+ // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index be24e50c9..e90d385ba 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -1,4 +1,4 @@
-use hir::db::ExpandDatabase;
+use hir::{db::ExpandDatabase, HirFileIdExt};
use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
use itertools::Itertools;
use syntax::AstNode;
@@ -87,7 +87,12 @@ mod baz {}
"E0583",
),
message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs",
- range: 0..8,
+ range: FileRange {
+ file_id: FileId(
+ 0,
+ ),
+ range: 0..8,
+ },
severity: Error,
unused: false,
experimental: false,
@@ -150,11 +155,9 @@ mod baz {}
],
),
main_node: Some(
- InFile {
+ InFileWrapper {
file_id: FileId(
- FileId(
- 0,
- ),
+ 0,
),
value: MODULE@0..8
MOD_KW@0..3 "mod"
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs
new file mode 100644
index 000000000..28ccf474b
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs
@@ -0,0 +1,111 @@
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
+
+// Diagnostic: unused-variables
+//
+// This diagnostic is triggered when a local variable is not used.
+pub(crate) fn unused_variables(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnusedVariable,
+) -> Diagnostic {
+ let ast = d.local.primary_source(ctx.sema.db).syntax_ptr();
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcLint("unused_variables"),
+ "unused variable",
+ ast,
+ )
+ .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn unused_variables_simple() {
+ check_diagnostics(
+ r#"
+//- minicore: fn
+struct Foo { f1: i32, f2: i64 }
+
+fn f(kkk: i32) {}
+ //^^^ warn: unused variable
+fn main() {
+ let a = 2;
+ //^ warn: unused variable
+ let b = 5;
+ // note: `unused variable` implies `unused mut`, so we should not emit both at the same time.
+ let mut c = f(b);
+ //^^^^^ warn: unused variable
+ let (d, e) = (3, 5);
+ //^ warn: unused variable
+ let _ = e;
+ let f1 = 2;
+ let f2 = 5;
+ let f = Foo { f1, f2 };
+ match f {
+ Foo { f1, f2 } => {
+ //^^ warn: unused variable
+ _ = f2;
+ }
+ }
+ let g = false;
+ if g {}
+ let h: fn() -> i32 = || 2;
+ let i = h();
+ //^ warn: unused variable
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unused_self() {
+ check_diagnostics(
+ r#"
+struct S {
+}
+impl S {
+ fn owned_self(self, u: i32) {}
+ //^ warn: unused variable
+ fn ref_self(&self, u: i32) {}
+ //^ warn: unused variable
+ fn ref_mut_self(&mut self, u: i32) {}
+ //^ warn: unused variable
+ fn owned_mut_self(mut self) {}
+ //^^^^^^^^ 💡 warn: variable does not need to be mutable
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn allow_unused_variables_for_identifiers_starting_with_underline() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let _x = 2;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn respect_lint_attributes_for_unused_variables() {
+ check_diagnostics(
+ r#"
+fn main() {
+ #[allow(unused_variables)]
+ let x = 2;
+}
+
+#[deny(unused)]
+fn main2() {
+ let x = 2;
+ //^ error: unused variable
+}
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
index c4ac59ec2..8dce2af23 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
@@ -1,5 +1,8 @@
use hir::InFile;
-use ide_db::{base_db::FileId, source_change::SourceChange};
+use ide_db::{
+ base_db::{FileId, FileRange},
+ source_change::SourceChange,
+};
use itertools::Itertools;
use syntax::{ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
@@ -38,7 +41,7 @@ pub(crate) fn useless_braces(
Diagnostic::new(
DiagnosticCode::RustcLint("unused_braces"),
"Unnecessary braces in use statement".to_string(),
- use_range,
+ FileRange { file_id, range: use_range },
)
.with_main_node(InFile::new(file_id.into(), node.clone()))
.with_fixes(Some(vec![fix(
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index ebe197a67..579386c72 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -23,7 +23,7 @@
//! There are also a couple of ad-hoc diagnostics implemented directly here, we
//! don't yet have a great pattern for how to do them properly.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod handlers {
pub(crate) mod break_outside_of_loop;
@@ -44,6 +44,10 @@ mod handlers {
pub(crate) mod private_assoc_item;
pub(crate) mod private_field;
pub(crate) mod replace_filter_map_next_with_find_map;
+ pub(crate) mod trait_impl_orphan;
+ pub(crate) mod trait_impl_incorrect_safety;
+ pub(crate) mod trait_impl_missing_assoc_item;
+ pub(crate) mod trait_impl_redundant_assoc_item;
pub(crate) mod typed_hole;
pub(crate) mod type_mismatch;
pub(crate) mod unimplemented_builtin_macro;
@@ -56,6 +60,7 @@ mod handlers {
pub(crate) mod unresolved_proc_macro;
pub(crate) mod undeclared_label;
pub(crate) mod unreachable_label;
+ pub(crate) mod unused_variables;
// The handlers below are unusual, the implement the diagnostics as well.
pub(crate) mod field_shorthand;
@@ -85,11 +90,11 @@ use stdx::never;
use syntax::{
algo::find_node_at_range,
ast::{self, AstNode},
- SyntaxNode, SyntaxNodePtr, TextRange,
+ AstPtr, SyntaxNode, SyntaxNodePtr, TextRange,
};
// FIXME: Make this an enum
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum DiagnosticCode {
RustcHardError(&'static str),
RustcLint(&'static str),
@@ -129,7 +134,7 @@ impl DiagnosticCode {
pub struct Diagnostic {
pub code: DiagnosticCode,
pub message: String,
- pub range: TextRange,
+ pub range: FileRange,
pub severity: Severity,
pub unused: bool,
pub experimental: bool,
@@ -139,7 +144,7 @@ pub struct Diagnostic {
}
impl Diagnostic {
- fn new(code: DiagnosticCode, message: impl Into<String>, range: TextRange) -> Diagnostic {
+ fn new(code: DiagnosticCode, message: impl Into<String>, range: FileRange) -> Diagnostic {
let message = message.into();
Diagnostic {
code,
@@ -168,7 +173,7 @@ impl Diagnostic {
node: InFile<SyntaxNodePtr>,
) -> Diagnostic {
let file_id = node.file_id;
- Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()).range)
+ Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()))
.with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id))))
}
@@ -193,7 +198,7 @@ impl Diagnostic {
}
}
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Severity {
Error,
Warning,
@@ -224,6 +229,7 @@ pub struct DiagnosticsConfig {
// FIXME: We may want to include a whole `AssistConfig` here
pub insert_use: InsertUseConfig,
pub prefer_no_std: bool,
+ pub prefer_prelude: bool,
}
impl DiagnosticsConfig {
@@ -246,6 +252,7 @@ impl DiagnosticsConfig {
skip_glob_imports: false,
},
prefer_no_std: false,
+ prefer_prelude: true,
}
}
}
@@ -261,7 +268,7 @@ impl DiagnosticsContext<'_> {
&self,
node: &InFile<SyntaxNodePtr>,
precise_location: Option<TextRange>,
- ) -> TextRange {
+ ) -> FileRange {
let sema = &self.sema;
(|| {
let precise_location = precise_location?;
@@ -274,10 +281,11 @@ impl DiagnosticsContext<'_> {
}
})()
.unwrap_or_else(|| sema.diagnostics_display_range(node.clone()))
- .range
}
}
+/// Request diagnostics for the given [`FileId`]. The produced diagnostics may point to other files
+/// due to macros.
pub fn diagnostics(
db: &RootDatabase,
config: &DiagnosticsConfig,
@@ -294,7 +302,7 @@ pub fn diagnostics(
Diagnostic::new(
DiagnosticCode::RustcHardError("syntax-error"),
format!("Syntax Error: {err}"),
- err.range(),
+ FileRange { file_id, range: err.range() },
)
}));
@@ -355,6 +363,10 @@ pub fn diagnostics(
AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d),
AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d),
AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d),
+ AnyDiagnostic::TraitImplIncorrectSafety(d) => handlers::trait_impl_incorrect_safety::trait_impl_incorrect_safety(&ctx, &d),
+ AnyDiagnostic::TraitImplMissingAssocItems(d) => handlers::trait_impl_missing_assoc_item::trait_impl_missing_assoc_item(&ctx, &d),
+ AnyDiagnostic::TraitImplRedundantAssocItems(d) => handlers::trait_impl_redundant_assoc_item::trait_impl_redundant_assoc_item(&ctx, &d),
+ AnyDiagnostic::TraitImplOrphan(d) => handlers::trait_impl_orphan::trait_impl_orphan(&ctx, &d),
AnyDiagnostic::TypedHole(d) => handlers::typed_hole::typed_hole(&ctx, &d),
AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d),
AnyDiagnostic::UndeclaredLabel(d) => handlers::undeclared_label::undeclared_label(&ctx, &d),
@@ -368,6 +380,7 @@ pub fn diagnostics(
AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d),
AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled),
AnyDiagnostic::UnusedMut(d) => handlers::mutability_errors::unused_mut(&ctx, &d),
+ AnyDiagnostic::UnusedVariable(d) => handlers::unused_variables::unused_variables(&ctx, &d),
AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d),
AnyDiagnostic::MismatchedTupleStructPatArgCount(d) => handlers::mismatched_arg_count::mismatched_tuple_struct_pat_arg_count(&ctx, &d),
};
@@ -559,12 +572,28 @@ fn adjusted_display_range<N: AstNode>(
ctx: &DiagnosticsContext<'_>,
diag_ptr: InFile<SyntaxNodePtr>,
adj: &dyn Fn(N) -> Option<TextRange>,
-) -> TextRange {
+) -> FileRange {
let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr);
let source_file = ctx.sema.db.parse(file_id);
- find_node_at_range::<N>(&source_file.syntax_node(), range)
- .filter(|it| it.syntax().text_range() == range)
- .and_then(adj)
- .unwrap_or(range)
+ FileRange {
+ file_id,
+ range: find_node_at_range::<N>(&source_file.syntax_node(), range)
+ .filter(|it| it.syntax().text_range() == range)
+ .and_then(adj)
+ .unwrap_or(range),
+ }
+}
+
+// FIXME Replace the one above with this one?
+fn adjusted_display_range_new<N: AstNode>(
+ ctx: &DiagnosticsContext<'_>,
+ diag_ptr: InFile<AstPtr<N>>,
+ adj: &dyn Fn(N) -> Option<TextRange>,
+) -> FileRange {
+ let source_file = ctx.sema.parse_or_expand(diag_ptr.file_id);
+ let node = diag_ptr.value.to_node(&source_file);
+ diag_ptr
+ .with_value(adj(node).unwrap_or_else(|| diag_ptr.value.text_range()))
+ .original_node_file_range_rooted(ctx.sema.db)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
index ee0e03549..48e0363c9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -5,8 +5,9 @@ use expect_test::Expect;
use ide_db::{
assists::AssistResolveStrategy,
base_db::{fixture::WithFixture, SourceDatabaseExt},
- RootDatabase,
+ LineIndexDatabase, RootDatabase,
};
+use itertools::Itertools;
use stdx::trim_indent;
use test_utils::{assert_eq_text, extract_annotations, MiniCore};
@@ -43,7 +44,8 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.pop()
.expect("no diagnostics");
- let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
+ let fix =
+ &diagnostic.fixes.expect(&format!("{:?} diagnostic misses fixes", diagnostic.code))[nth];
let actual = {
let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap();
@@ -102,32 +104,39 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) {
#[track_caller]
pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
let (db, files) = RootDatabase::with_many_files(ra_fixture);
+ let mut annotations = files
+ .iter()
+ .copied()
+ .flat_map(|file_id| {
+ super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id).into_iter().map(
+ |d| {
+ let mut annotation = String::new();
+ if let Some(fixes) = &d.fixes {
+ assert!(!fixes.is_empty());
+ annotation.push_str("💡 ")
+ }
+ annotation.push_str(match d.severity {
+ Severity::Error => "error",
+ Severity::WeakWarning => "weak",
+ Severity::Warning => "warn",
+ Severity::Allow => "allow",
+ });
+ annotation.push_str(": ");
+ annotation.push_str(&d.message);
+ (d.range, annotation)
+ },
+ )
+ })
+ .map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation)))
+ .into_group_map();
for file_id in files {
- let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+ let line_index = db.line_index(file_id);
+ let mut actual = annotations.remove(&file_id).unwrap_or_default();
let expected = extract_annotations(&db.file_text(file_id));
- let mut actual = diagnostics
- .into_iter()
- .map(|d| {
- let mut annotation = String::new();
- if let Some(fixes) = &d.fixes {
- assert!(!fixes.is_empty());
- annotation.push_str("💡 ")
- }
- annotation.push_str(match d.severity {
- Severity::Error => "error",
- Severity::WeakWarning => "weak",
- Severity::Warning => "warn",
- Severity::Allow => "allow",
- });
- annotation.push_str(": ");
- annotation.push_str(&d.message);
- (d.range, annotation)
- })
- .collect::<Vec<_>>();
actual.sort_by_key(|(range, _)| range.start());
if expected.is_empty() {
- // makes minicore smoke test debugable
+ // makes minicore smoke test debuggable
for (e, _) in &actual {
eprintln!(
"Code in range {e:?} = {}",
@@ -136,8 +145,16 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
}
}
if expected != actual {
- let fneg = expected.iter().filter(|x| !actual.contains(x)).collect::<Vec<_>>();
- let fpos = actual.iter().filter(|x| !expected.contains(x)).collect::<Vec<_>>();
+ let fneg = expected
+ .iter()
+ .filter(|x| !actual.contains(x))
+ .map(|(range, s)| (line_index.line_col(range.start()), range, s))
+ .collect::<Vec<_>>();
+ let fpos = actual
+ .iter()
+ .filter(|x| !expected.contains(x))
+ .map(|(range, s)| (line_index.line_col(range.start()), range, s))
+ .collect::<Vec<_>>();
panic!("Diagnostic test failed.\nFalse negatives: {fneg:?}\nFalse positives: {fpos:?}");
}
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
index 70ed6dea5..56b29f92b 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-itertools = "0.10.5"
+itertools.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
index 66832a0be..d756e7a63 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -3,7 +3,7 @@
//! Allows searching the AST for code that matches one or more patterns and then replacing that code
//! based on a template.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
// Feature: Structural Search and Replace
//
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
index 60fcbbbd3..0312a0f11 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -651,7 +651,7 @@ impl Match {
for (path, resolved_path) in &template.resolved_paths {
if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
let mod_path =
- module.find_use_path(sema.db, module_def, false).ok_or_else(|| {
+ module.find_use_path(sema.db, module_def, false, true).ok_or_else(|| {
match_error!("Failed to render template path `{}` at match location")
})?;
self.rendered_template_paths.insert(path.clone(), mod_path);
diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml
index 2aee203c4..0943574ec 100644
--- a/src/tools/rust-analyzer/crates/ide/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml
@@ -14,9 +14,10 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
crossbeam-channel = "0.5.5"
-either = "1.7.0"
-itertools = "0.10.5"
-tracing = "0.1.35"
+arrayvec = "0.7.4"
+either.workspace = true
+itertools.workspace = true
+tracing.workspace = true
oorandom = "11.1.3"
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.1", default-features = false }
diff --git a/src/tools/rust-analyzer/crates/ide/src/annotations.rs b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
index fb79b5dc2..d7f82b4af 100644
--- a/src/tools/rust-analyzer/crates/ide/src/annotations.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/annotations.rs
@@ -1,4 +1,4 @@
-use hir::{HasSource, InFile, Semantics};
+use hir::{HasSource, InFile, InRealFile, Semantics};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
defs::Definition,
@@ -149,8 +149,8 @@ pub(crate) fn annotations(
node: InFile<T>,
source_file_id: FileId,
) -> Option<(TextRange, Option<TextRange>)> {
- if let Some(InFile { file_id, value }) = node.original_ast_node(db) {
- if file_id == source_file_id.into() {
+ if let Some(InRealFile { file_id, value }) = node.original_ast_node(db) {
+ if file_id == source_file_id {
return Some((
value.syntax().text_range(),
value.name().map(|name| name.syntax().text_range()),
diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
index f834f2ce5..458b852e2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs
@@ -1,6 +1,8 @@
//! Entry point for call-hierarchy
-use hir::Semantics;
+use std::iter;
+
+use hir::{DescendPreference, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
@@ -66,7 +68,10 @@ pub(crate) fn incoming_calls(
def.try_to_nav(sema.db)
});
if let Some(nav) = nav {
- calls.add(nav, sema.original_range(name.syntax()).range);
+ calls.add(nav.call_site, sema.original_range(name.syntax()).range);
+ if let Some(other) = nav.def_site {
+ calls.add(other, sema.original_range(name.syntax()).range);
+ }
}
}
}
@@ -87,7 +92,7 @@ pub(crate) fn outgoing_calls(
})?;
let mut calls = CallLocations::default();
- sema.descend_into_macros(token, offset)
+ sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
.filter_map(|item| match item {
@@ -117,8 +122,9 @@ pub(crate) fn outgoing_calls(
function.try_to_nav(db).zip(Some(range))
}
}?;
- Some((nav_target, range))
+ Some(nav_target.into_iter().zip(iter::repeat(range)))
})
+ .flatten()
.for_each(|(nav, range)| calls.add(nav, range));
Some(calls.into_items())
@@ -149,7 +155,7 @@ mod tests {
fn check_hierarchy(
ra_fixture: &str,
- expected: Expect,
+ expected_nav: Expect,
expected_incoming: Expect,
expected_outgoing: Expect,
) {
@@ -158,7 +164,7 @@ mod tests {
let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
assert_eq!(navs.len(), 1);
let nav = navs.pop().unwrap();
- expected.assert_eq(&nav.debug_render());
+ expected_nav.assert_eq(&nav.debug_render());
let item_pos =
FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index 37a177622..9760f9daf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -12,7 +12,9 @@ use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions
use stdx::format_to;
use url::Url;
-use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
+use hir::{
+ db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, HasAttrs,
+};
use ide_db::{
base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase},
defs::{Definition, NameClass, NameRefClass},
@@ -144,7 +146,7 @@ pub(crate) fn external_docs(
kind if kind.is_trivia() => 0,
_ => 1,
})?;
- let token = sema.descend_into_macros_single(token, offset);
+ let token = sema.descend_into_macros_single(DescendPreference::None, token);
let node = token.parent()?;
let definition = match_ast! {
@@ -286,7 +288,7 @@ impl DocCommentToken {
let original_start = doc_token.text_range().start();
let relative_comment_offset = offset - original_start - prefix_len;
- sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| {
+ sema.descend_into_macros(DescendPreference::None, doc_token).into_iter().find_map(|t| {
let (node, descended_prefix_len) = match_ast! {
match t {
ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
@@ -602,7 +604,17 @@ fn filename_and_frag_for_def(
}
Definition::Const(c) => format!("const.{}.html", c.name(db)?.display(db.upcast())),
Definition::Static(s) => format!("static.{}.html", s.name(db).display(db.upcast())),
- Definition::Macro(mac) => format!("macro.{}.html", mac.name(db).display(db.upcast())),
+ Definition::Macro(mac) => match mac.kind(db) {
+ hir::MacroKind::Declarative
+ | hir::MacroKind::BuiltIn
+ | hir::MacroKind::Attr
+ | hir::MacroKind::ProcMacro => {
+ format!("macro.{}.html", mac.name(db).display(db.upcast()))
+ }
+ hir::MacroKind::Derive => {
+ format!("derive.{}.html", mac.name(db).display(db.upcast()))
+ }
+ },
Definition::Field(field) => {
let def = match field.parent_def(db) {
hir::VariantDef::Struct(it) => Definition::Adt(it.into()),
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
index 9ae70ae66..f388aea4c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
@@ -1,4 +1,4 @@
-use std::ffi::OsStr;
+use std::{ffi::OsStr, iter};
use expect_test::{expect, Expect};
use hir::Semantics;
@@ -63,10 +63,12 @@ fn check_doc_links(ra_fixture: &str) {
let defs = extract_definitions_from_docs(&docs);
let actual: Vec<_> = defs
.into_iter()
- .map(|(_, link, ns)| {
+ .flat_map(|(_, link, ns)| {
let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns)
.unwrap_or_else(|| panic!("Failed to resolve {link}"));
- let nav_target = def.try_to_nav(sema.db).unwrap();
+ def.try_to_nav(sema.db).unwrap().into_iter().zip(iter::repeat(link))
+ })
+ .map(|(nav_target, link)| {
let range =
FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() };
(range, link)
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
index 119a9c7c3..024053eff 100644
--- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -1,4 +1,4 @@
-use hir::Semantics;
+use hir::{DescendPreference, InFile, MacroFileIdExt, Semantics};
use ide_db::{
base_db::FileId, helpers::pick_best_token,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
@@ -14,12 +14,12 @@ pub struct ExpandedMacro {
// Feature: Expand Macro Recursively
//
-// Shows the full macro expansion of the macro at current cursor.
+// Shows the full macro expansion of the macro at the current caret position.
//
// |===
// | Editor | Action Name
//
-// | VS Code | **rust-analyzer: Expand macro recursively**
+// | VS Code | **rust-analyzer: Expand macro recursively at caret**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
@@ -40,16 +40,20 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
// struct Bar;
// ```
- let derive =
- sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| {
- let hir_file = sema.hir_file_for(&descended.parent()?);
- if !hir_file.is_derive_attr_pseudo_expansion(db) {
+ let derive = sema
+ .descend_into_macros(DescendPreference::None, tok.clone())
+ .into_iter()
+ .find_map(|descended| {
+ let macro_file = sema.hir_file_for(&descended.parent()?).macro_file()?;
+ if !macro_file.is_derive_attr_pseudo_expansion(db) {
return None;
}
let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
// up map out of the #[derive] expansion
- let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
+ let InFile { file_id, value: tokens } =
+ hir::InMacroFile::new(macro_file, descended).upmap_once(db);
+ let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?;
let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
let expansions = sema.expand_derive_macro(&attr)?;
let idx = attr
diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
index 3d89599c5..b706e959d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs
@@ -1,6 +1,6 @@
use std::iter::successors;
-use hir::Semantics;
+use hir::{DescendPreference, Semantics};
use ide_db::RootDatabase;
use syntax::{
algo::{self, skip_trivia_token},
@@ -108,7 +108,7 @@ fn try_extend_selection(
let node = shallowest_node(&node);
- if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
+ if node.parent().is_some_and(|n| list_kinds.contains(&n.kind())) {
if let Some(range) = extend_list_item(&node) {
return Some(range);
}
@@ -141,9 +141,9 @@ fn extend_tokens_from_range(
// compute original mapped token range
let extended = {
let fst_expanded =
- sema.descend_into_macros_single(first_token.clone(), original_range.start());
+ sema.descend_into_macros_single(DescendPreference::None, first_token.clone());
let lst_expanded =
- sema.descend_into_macros_single(last_token.clone(), original_range.end());
+ sema.descend_into_macros_single(DescendPreference::None, last_token.clone());
let mut lca =
algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
lca = shallowest_node(&lca);
@@ -154,10 +154,10 @@ fn extend_tokens_from_range(
};
// Compute parent node range
- let validate = |offset: TextSize| {
+ let validate = || {
let extended = &extended;
move |token: &SyntaxToken| -> bool {
- let expanded = sema.descend_into_macros_single(token.clone(), offset);
+ let expanded = sema.descend_into_macros_single(DescendPreference::None, token.clone());
let parent = match expanded.parent() {
Some(it) => it,
None => return false,
@@ -171,14 +171,14 @@ fn extend_tokens_from_range(
let token = token.prev_token()?;
skip_trivia_token(token, Direction::Prev)
})
- .take_while(validate(original_range.start()))
+ .take_while(validate())
.last()?;
let last = successors(Some(last_token), |token| {
let token = token.next_token()?;
skip_trivia_token(token, Direction::Next)
})
- .take_while(validate(original_range.end()))
+ .take_while(validate())
.last()?;
let range = first.text_range().cover(last.text_range());
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
index 7e0fab426..fae100743 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -1,4 +1,4 @@
-use hir::{AsAssocItem, Semantics};
+use hir::{AsAssocItem, DescendPreference, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
RootDatabase,
@@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
.find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
let range = original_token.text_range();
let info: Vec<NavigationTarget> = sema
- .descend_into_macros(original_token, offset)
+ .descend_into_macros(DescendPreference::None, original_token)
.iter()
.filter_map(|token| {
let parent = token.parent()?;
@@ -66,6 +66,7 @@ pub(crate) fn goto_declaration(
let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
item.try_to_nav(db)
})
+ .flatten()
.collect();
if info.is_empty() {
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index e09b9f391..7491879a6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -4,7 +4,7 @@ use crate::{
doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
RangeInfo, TryToNav,
};
-use hir::{AsAssocItem, AssocItem, Semantics};
+use hir::{AsAssocItem, AssocItem, DescendPreference, Semantics};
use ide_db::{
base_db::{AnchoredPath, FileId, FileLoader},
defs::{Definition, IdentClass},
@@ -52,21 +52,34 @@ pub(crate) fn goto_definition(
if let Some(doc_comment) = token_as_doc_comment(&original_token) {
return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| {
let nav = def.try_to_nav(db)?;
- Some(RangeInfo::new(link_range, vec![nav]))
+ Some(RangeInfo::new(link_range, nav.collect()))
});
}
+
+ if let Some((range, resolution)) =
+ sema.check_for_format_args_template(original_token.clone(), offset)
+ {
+ return Some(RangeInfo::new(
+ range,
+ match resolution {
+ Some(res) => def_to_nav(db, Definition::from(res)),
+ None => vec![],
+ },
+ ));
+ }
+
let navs = sema
- .descend_into_macros(original_token.clone(), offset)
+ .descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter()
.filter_map(|token| {
let parent = token.parent()?;
- if let Some(tt) = ast::TokenTree::cast(parent) {
+ if let Some(tt) = ast::TokenTree::cast(parent.clone()) {
if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
return Some(vec![x]);
}
}
Some(
- IdentClass::classify_token(sema, &token)?
+ IdentClass::classify_node(sema, &parent)?
.definitions()
.into_iter()
.flat_map(|def| {
@@ -75,6 +88,7 @@ pub(crate) fn goto_definition(
.resolved_crate(db)
.map(|it| it.root_module().to_nav(sema.db))
.into_iter()
+ .flatten()
.collect();
}
try_filter_trait_item_definition(sema, &def)
@@ -125,6 +139,7 @@ fn try_lookup_include_path(
docs: None,
})
}
+
/// finds the trait definition of an impl'd item, except function
/// e.g.
/// ```rust
@@ -153,13 +168,13 @@ fn try_filter_trait_item_definition(
.iter()
.filter(|itm| discriminant(*itm) == discri_value)
.find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten())
- .map(|it| vec![it])
+ .map(|it| it.collect())
}
}
}
fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> {
- def.try_to_nav(db).map(|it| vec![it]).unwrap_or_default()
+ def.try_to_nav(db).map(|it| it.collect()).unwrap_or_default()
}
#[cfg(test)]
@@ -399,11 +414,11 @@ fn bar() {
//- /lib.rs
macro_rules! define_fn {
() => (fn foo() {})
+ //^^^
}
define_fn!();
//^^^^^^^^^^^^^
-
fn bar() {
$0foo();
}
@@ -807,18 +822,13 @@ mod confuse_index { fn foo(); }
fn goto_through_format() {
check(
r#"
+//- minicore: fmt
#[macro_export]
macro_rules! format {
($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*)))
}
-#[rustc_builtin_macro]
-#[macro_export]
-macro_rules! format_args {
- ($fmt:expr) => ({ /* compiler built-in */ });
- ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
-}
pub mod __export {
- pub use crate::format_args;
+ pub use core::format_args;
fn foo() {} // for index confusion
}
fn foo() -> i8 {}
@@ -1738,9 +1748,9 @@ macro_rules! foo {
fn $ident(Foo { $ident }: Foo) {}
}
}
-foo!(foo$0);
- //^^^
- //^^^
+ foo!(foo$0);
+ //^^^
+ //^^^
"#,
);
check(
@@ -2057,4 +2067,18 @@ fn f2() {
"#,
);
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+ let a = "world";
+ // ^
+ format_args!("hello {a$0}");
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index 544c6b423..6384db39d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -1,4 +1,4 @@
-use hir::{AsAssocItem, Impl, Semantics};
+use hir::{AsAssocItem, DescendPreference, Impl, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
helpers::pick_best_token,
@@ -34,7 +34,7 @@ pub(crate) fn goto_implementation(
})?;
let range = original_token.text_range();
let navs =
- sema.descend_into_macros(original_token, offset)
+ sema.descend_into_macros(DescendPreference::None, original_token)
.into_iter()
.filter_map(|token| token.parent().and_then(ast::NameLike::cast))
.filter_map(|node| match &node {
@@ -82,7 +82,11 @@ pub(crate) fn goto_implementation(
}
fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<NavigationTarget> {
- Impl::all_for_type(sema.db, ty).into_iter().filter_map(|imp| imp.try_to_nav(sema.db)).collect()
+ Impl::all_for_type(sema.db, ty)
+ .into_iter()
+ .filter_map(|imp| imp.try_to_nav(sema.db))
+ .flatten()
+ .collect()
}
fn impls_for_trait(
@@ -92,6 +96,7 @@ fn impls_for_trait(
Impl::all_for_trait(sema.db, trait_)
.into_iter()
.filter_map(|imp| imp.try_to_nav(sema.db))
+ .flatten()
.collect()
}
@@ -109,6 +114,7 @@ fn impls_for_trait_item(
})?;
item.try_to_nav(sema.db)
})
+ .flatten()
.collect()
}
@@ -249,7 +255,7 @@ impl T for &Foo {}
r#"
//- minicore: copy, derive
#[derive(Copy)]
-//^^^^^^^^^^^^^^^
+ //^^^^
struct Foo$0;
"#,
);
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
index 955923d76..ad393d980 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
@@ -1,3 +1,4 @@
+use hir::{DescendPreference, GenericParam};
use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase};
use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T};
@@ -30,14 +31,45 @@ pub(crate) fn goto_type_definition(
let mut res = Vec::new();
let mut push = |def: Definition| {
- if let Some(nav) = def.try_to_nav(db) {
- if !res.contains(&nav) {
- res.push(nav);
+ if let Some(navs) = def.try_to_nav(db) {
+ for nav in navs {
+ if !res.contains(&nav) {
+ res.push(nav);
+ }
}
}
};
+ let mut process_ty = |ty: hir::Type| {
+ // collect from each `ty` into the `res` result vec
+ let ty = ty.strip_references();
+ ty.walk(db, |t| {
+ if let Some(adt) = t.as_adt() {
+ push(adt.into());
+ } else if let Some(trait_) = t.as_dyn_trait() {
+ push(trait_.into());
+ } else if let Some(traits) = t.as_impl_traits(db) {
+ traits.for_each(|it| push(it.into()));
+ } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
+ push(trait_.into());
+ }
+ });
+ };
+ if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
+ if let Some(ty) = resolution.and_then(|res| match Definition::from(res) {
+ Definition::Const(it) => Some(it.ty(db)),
+ Definition::Static(it) => Some(it.ty(db)),
+ Definition::GenericParam(GenericParam::ConstParam(it)) => Some(it.ty(db)),
+ Definition::Local(it) => Some(it.ty(db)),
+ Definition::Adt(hir::Adt::Struct(it)) => Some(it.ty(db)),
+ _ => None,
+ }) {
+ process_ty(ty);
+ }
+ return Some(RangeInfo::new(range, res));
+ }
+
let range = token.text_range();
- sema.descend_into_macros(token, offset)
+ sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|token| {
let ty = sema
@@ -75,21 +107,7 @@ pub(crate) fn goto_type_definition(
});
ty
})
- .for_each(|ty| {
- // collect from each `ty` into the `res` result vec
- let ty = ty.strip_references();
- ty.walk(db, |t| {
- if let Some(adt) = t.as_adt() {
- push(adt.into());
- } else if let Some(trait_) = t.as_dyn_trait() {
- push(trait_.into());
- } else if let Some(traits) = t.as_impl_traits(db) {
- traits.for_each(|it| push(it.into()));
- } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
- push(trait_.into());
- }
- });
- });
+ .for_each(process_ty);
Some(RangeInfo::new(range, res))
}
@@ -328,4 +346,40 @@ fn foo(x$0: Bar<Baz<Foo>, Baz<usize>) {}
"#,
);
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ r#"
+//- minicore: fmt
+struct Bar;
+ // ^^^
+ fn test() {
+ let a = Bar;
+ format_args!("hello {a$0}");
+}
+"#,
+ );
+ check(
+ r#"
+//- minicore: fmt
+struct Bar;
+ // ^^^
+ fn test() {
+ format_args!("hello {Bar$0}");
+}
+"#,
+ );
+ check(
+ r#"
+//- minicore: fmt
+struct Bar;
+ // ^^^
+const BAR: Bar = Bar;
+fn test() {
+ format_args!("hello {BAR$0}");
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 46a0464e9..3aed007f3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -1,4 +1,6 @@
-use hir::Semantics;
+use std::iter;
+
+use hir::{DescendPreference, Semantics};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
defs::{Definition, IdentClass},
@@ -15,7 +17,6 @@ use syntax::{
SyntaxKind::{self, IDENT, INT_NUMBER},
SyntaxNode, SyntaxToken, TextRange, T,
};
-use text_edit::TextSize;
use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav};
@@ -43,7 +44,7 @@ pub struct HighlightRelatedConfig {
//
// . if on an identifier, highlights all references to that identifier in the current file
// .. additionally, if the identifier is a trait in a where clause, type parameter trait bound or use item, highlights all references to that trait's assoc items in the corresponding scope
-// . if on an `async` or `await token, highlights all yield points for that async context
+// . if on an `async` or `await` token, highlights all yield points for that async context
// . if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
// . if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
// . if on a `move` or `|` token that belongs to a closure, highlights all captures of the closure.
@@ -116,7 +117,7 @@ fn highlight_closure_captures(
local
.sources(sema.db)
.into_iter()
- .map(|x| x.to_nav(sema.db))
+ .flat_map(|x| x.to_nav(sema.db))
.filter(|decl| decl.file_id == file_id)
.filter_map(|decl| decl.focus_range)
.map(move |range| HighlightedRange { range, category })
@@ -132,7 +133,16 @@ fn highlight_references(
token: SyntaxToken,
FilePosition { file_id, offset }: FilePosition,
) -> Option<Vec<HighlightedRange>> {
- let defs = find_defs(sema, token.clone(), offset);
+ let defs = if let Some((range, resolution)) =
+ sema.check_for_format_args_template(token.clone(), offset)
+ {
+ match resolution.map(Definition::from) {
+ Some(def) => iter::once(def).collect(),
+ None => return Some(vec![HighlightedRange { range, category: None }]),
+ }
+ } else {
+ find_defs(sema, token.clone())
+ };
let usages = defs
.iter()
.filter_map(|&d| {
@@ -206,7 +216,7 @@ fn highlight_references(
local
.sources(sema.db)
.into_iter()
- .map(|x| x.to_nav(sema.db))
+ .flat_map(|x| x.to_nav(sema.db))
.filter(|decl| decl.file_id == file_id)
.filter_map(|decl| decl.focus_range)
.map(|range| HighlightedRange { range, category })
@@ -215,21 +225,27 @@ fn highlight_references(
});
}
def => {
- let hl_range = match def {
+ let navs = match def {
Definition::Module(module) => {
- Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ NavigationTarget::from_module_to_decl(sema.db, module)
+ }
+ def => match def.try_to_nav(sema.db) {
+ Some(it) => it,
+ None => continue,
+ },
+ };
+ for nav in navs {
+ if nav.file_id != file_id {
+ continue;
+ }
+ let hl_range = nav.focus_range.map(|range| {
+ let category = references::decl_mutability(&def, node, range)
+ .then_some(ReferenceCategory::Write);
+ HighlightedRange { range, category }
+ });
+ if let Some(hl_range) = hl_range {
+ res.insert(hl_range);
}
- def => def.try_to_nav(sema.db),
- }
- .filter(|decl| decl.file_id == file_id)
- .and_then(|decl| decl.focus_range)
- .map(|range| {
- let category = references::decl_mutability(&def, node, range)
- .then_some(ReferenceCategory::Write);
- HighlightedRange { range, category }
- });
- if let Some(hl_range) = hl_range {
- res.insert(hl_range);
}
}
}
@@ -456,12 +472,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
}
}
-fn find_defs(
- sema: &Semantics<'_, RootDatabase>,
- token: SyntaxToken,
- offset: TextSize,
-) -> FxHashSet<Definition> {
- sema.descend_into_macros(token, offset)
+fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
+ sema.descend_into_macros(DescendPreference::None, token)
.into_iter()
.filter_map(|token| IdentClass::classify_token(sema, &token))
.map(IdentClass::definitions_no_ops)
@@ -1623,4 +1635,21 @@ fn f2<T: Foo>(t: T) {
"#,
);
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+ let a = "foo";
+ // ^
+ format_args!("hello {a} {a$0} {}", a);
+ // ^read
+ // ^read
+ // ^read
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index 21934b948..5ad119ace 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -6,7 +6,7 @@ mod tests;
use std::iter;
use either::Either;
-use hir::{db::DefDatabase, HasSource, LangItem, Semantics};
+use hir::{db::DefDatabase, DescendPreference, HasSource, LangItem, Semantics};
use ide_db::{
base_db::FileRange,
defs::{Definition, IdentClass, NameRefClass, OperatorClass},
@@ -21,6 +21,7 @@ use crate::{
doc_links::token_as_doc_comment,
markdown_remove::remove_markdown,
markup::Markup,
+ navigation_target::UpmappingResult,
runnables::{runnable_fn, runnable_mod},
FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
};
@@ -73,7 +74,7 @@ impl HoverAction {
it.module(db)?,
it.name(db).map(|name| name.display(db).to_string()),
),
- nav: it.try_to_nav(db)?,
+ nav: it.try_to_nav(db)?.call_site(),
})
})
.collect();
@@ -150,6 +151,19 @@ fn hover_simple(
});
}
+ if let Some((range, resolution)) =
+ sema.check_for_format_args_template(original_token.clone(), offset)
+ {
+ let res = hover_for_definition(
+ sema,
+ file_id,
+ Definition::from(resolution?),
+ &original_token.parent()?,
+ config,
+ )?;
+ return Some(RangeInfo::new(range, res));
+ }
+
let in_attr = original_token
.parent_ancestors()
.filter_map(ast::Item::cast)
@@ -161,11 +175,10 @@ fn hover_simple(
// prefer descending the same token kind in attribute expansions, in normal macros text
// equivalency is more important
- let descended = if in_attr {
- [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into()
- } else {
- sema.descend_into_macros_with_same_text(original_token.clone(), offset)
- };
+ let descended = sema.descend_into_macros(
+ if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText },
+ original_token.clone(),
+ );
let descended = || descended.iter();
let result = descended()
@@ -180,26 +193,24 @@ fn hover_simple(
descended()
.filter_map(|token| {
let node = token.parent()?;
- let class = IdentClass::classify_token(sema, token)?;
- if let IdentClass::Operator(OperatorClass::Await(_)) = class {
+ match IdentClass::classify_node(sema, &node)? {
// It's better for us to fall back to the keyword hover here,
// rendering poll is very confusing
- return None;
+ IdentClass::Operator(OperatorClass::Await(_)) => None,
+
+ IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand {
+ decl,
+ ..
+ }) => Some(vec![(Definition::ExternCrateDecl(decl), node)]),
+
+ class => Some(
+ class
+ .definitions()
+ .into_iter()
+ .zip(iter::repeat(node))
+ .collect::<Vec<_>>(),
+ ),
}
- if let IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand {
- decl,
- ..
- }) = class
- {
- return Some(vec![(Definition::ExternCrateDecl(decl), node)]);
- }
- Some(
- class
- .definitions()
- .into_iter()
- .zip(iter::once(node).cycle())
- .collect::<Vec<_>>(),
- )
})
.flatten()
.unique_by(|&(def, _)| def)
@@ -300,11 +311,11 @@ pub(crate) fn hover_for_definition(
sema: &Semantics<'_, RootDatabase>,
file_id: FileId,
definition: Definition,
- node: &SyntaxNode,
+ scope_node: &SyntaxNode,
config: &HoverConfig,
) -> Option<HoverResult> {
let famous_defs = match &definition {
- Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
+ Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())),
_ => None,
};
render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {
@@ -332,22 +343,26 @@ fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<Hov
}
let adt = match def {
- Definition::Trait(it) => return it.try_to_nav(db).map(to_action),
+ Definition::Trait(it) => {
+ return it.try_to_nav(db).map(UpmappingResult::call_site).map(to_action)
+ }
Definition::Adt(it) => Some(it),
Definition::SelfType(it) => it.self_ty(db).as_adt(),
_ => None,
}?;
- adt.try_to_nav(db).map(to_action)
+ adt.try_to_nav(db).map(UpmappingResult::call_site).map(to_action)
}
fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
match def {
- Definition::Function(it) => it.try_to_nav(db).map(|nav_target| {
- HoverAction::Reference(FilePosition {
- file_id: nav_target.file_id,
- offset: nav_target.focus_or_full_range().start(),
+ Definition::Function(it) => {
+ it.try_to_nav(db).map(UpmappingResult::call_site).map(|nav_target| {
+ HoverAction::Reference(FilePosition {
+ file_id: nav_target.file_id,
+ offset: nav_target.focus_or_full_range().start(),
+ })
})
- }),
+ }
_ => None,
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index f72ce37d1..d0a02fd0d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -402,10 +402,9 @@ pub(super) fn definition(
|&it| it.layout(db),
|_| {
let var_def = it.parent_def(db);
- let id = it.index();
match var_def {
hir::VariantDef::Struct(s) => {
- Adt::from(s).layout(db).ok().and_then(|layout| layout.field_offset(id))
+ Adt::from(s).layout(db).ok().and_then(|layout| layout.field_offset(it))
}
_ => None,
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index 81d6db564..d5ec336fc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -1136,7 +1136,9 @@ impl Thing {
```
```rust
- struct Thing
+ struct Thing {
+ x: u32,
+ }
```
"#]],
);
@@ -1155,7 +1157,9 @@ impl Thing {
```
```rust
- struct Thing
+ struct Thing {
+ x: u32,
+ }
```
"#]],
);
@@ -1174,7 +1178,9 @@ impl Thing {
```
```rust
- enum Thing
+ enum Thing {
+ A,
+ }
```
"#]],
);
@@ -1193,7 +1199,9 @@ impl Thing {
```
```rust
- enum Thing
+ enum Thing {
+ A,
+ }
```
"#]],
);
@@ -2005,7 +2013,10 @@ fn test_hover_layout_of_enum() {
```
```rust
- enum Foo // size = 16 (0x10), align = 8, niches = 254
+ enum Foo {
+ Variant1(u8, u16),
+ Variant2(i32, u8, i64),
+ } // size = 16 (0x10), align = 8, niches = 254
```
"#]],
);
@@ -2346,7 +2357,7 @@ fn main() { let s$0t = S{ f1:0 }; }
focus_range: 7..8,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {\n f1: u32,\n}",
},
},
],
@@ -2379,7 +2390,7 @@ fn main() { let s$0t = S{ f1:Arg(0) }; }
focus_range: 24..25,
name: "S",
kind: Struct,
- description: "struct S<T>",
+ description: "struct S<T> {\n f1: T,\n}",
},
},
HoverGotoTypeData {
@@ -2392,7 +2403,7 @@ fn main() { let s$0t = S{ f1:Arg(0) }; }
focus_range: 7..10,
name: "Arg",
kind: Struct,
- description: "struct Arg",
+ description: "struct Arg(u32);",
},
},
],
@@ -2438,7 +2449,7 @@ fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; }
focus_range: 24..25,
name: "S",
kind: Struct,
- description: "struct S<T>",
+ description: "struct S<T> {\n f1: T,\n}",
},
},
HoverGotoTypeData {
@@ -2451,7 +2462,7 @@ fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; }
focus_range: 7..10,
name: "Arg",
kind: Struct,
- description: "struct Arg",
+ description: "struct Arg(u32);",
},
},
],
@@ -2487,7 +2498,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
focus_range: 7..8,
name: "A",
kind: Struct,
- description: "struct A",
+ description: "struct A(u32);",
},
},
HoverGotoTypeData {
@@ -2500,7 +2511,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
focus_range: 22..23,
name: "B",
kind: Struct,
- description: "struct B",
+ description: "struct B(u32);",
},
},
HoverGotoTypeData {
@@ -2514,7 +2525,7 @@ fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
name: "C",
kind: Struct,
container_name: "M",
- description: "pub struct C",
+ description: "pub struct C(u32);",
},
},
],
@@ -2704,7 +2715,7 @@ fn main() { let s$0t = foo(); }
focus_range: 39..41,
name: "S1",
kind: Struct,
- description: "struct S1",
+ description: "struct S1 {}",
},
},
HoverGotoTypeData {
@@ -2717,7 +2728,7 @@ fn main() { let s$0t = foo(); }
focus_range: 52..54,
name: "S2",
kind: Struct,
- description: "struct S2",
+ description: "struct S2 {}",
},
},
],
@@ -2808,7 +2819,7 @@ fn foo(ar$0g: &impl Foo + Bar<S>) {}
focus_range: 36..37,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {}",
},
},
],
@@ -2908,7 +2919,7 @@ fn foo(ar$0g: &impl Foo<S>) {}
focus_range: 23..24,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {}",
},
},
],
@@ -2945,7 +2956,7 @@ fn main() { let s$0t = foo(); }
focus_range: 49..50,
name: "B",
kind: Struct,
- description: "struct B<T>",
+ description: "struct B<T> {}",
},
},
HoverGotoTypeData {
@@ -3034,7 +3045,7 @@ fn foo(ar$0g: &dyn Foo<S>) {}
focus_range: 23..24,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {}",
},
},
],
@@ -3082,7 +3093,7 @@ fn foo(a$0rg: &impl ImplTrait<B<dyn DynTrait<B<S>>>>) {}
focus_range: 50..51,
name: "B",
kind: Struct,
- description: "struct B<T>",
+ description: "struct B<T> {}",
},
},
HoverGotoTypeData {
@@ -3108,7 +3119,7 @@ fn foo(a$0rg: &impl ImplTrait<B<dyn DynTrait<B<S>>>>) {}
focus_range: 65..66,
name: "S",
kind: Struct,
- description: "struct S",
+ description: "struct S {}",
},
},
],
@@ -3335,7 +3346,7 @@ struct S$0T<const C: usize = 1, T = Foo>(T);
```
```rust
- struct ST<const C: usize = 1, T = Foo>
+ struct ST<const C: usize = 1, T = Foo>(T);
```
"#]],
);
@@ -3356,7 +3367,7 @@ struct S$0T<const C: usize = {40 + 2}, T = Foo>(T);
```
```rust
- struct ST<const C: usize = {const}, T = Foo>
+ struct ST<const C: usize = {const}, T = Foo>(T);
```
"#]],
);
@@ -3378,7 +3389,7 @@ struct S$0T<const C: usize = VAL, T = Foo>(T);
```
```rust
- struct ST<const C: usize = VAL, T = Foo>
+ struct ST<const C: usize = VAL, T = Foo>(T);
```
"#]],
);
@@ -5266,38 +5277,46 @@ pub fn foo() {}
#[test]
fn hover_feature() {
check(
- r#"#![feature(box_syntax$0)]"#,
- expect![[r##"
- *box_syntax*
- ```
- box_syntax
- ```
- ___
+ r#"#![feature(intrinsics$0)]"#,
+ expect![[r#"
+ *intrinsics*
+ ```
+ intrinsics
+ ```
+ ___
- # `box_syntax`
+ # `intrinsics`
- The tracking issue for this feature is: [#49733]
+ The tracking issue for this feature is: None.
- [#49733]: https://github.com/rust-lang/rust/issues/49733
+ Intrinsics are never intended to be stable directly, but intrinsics are often
+ exported in some sort of stable manner. Prefer using the stable interfaces to
+ the intrinsic directly when you can.
- See also [`box_patterns`](box-patterns.md)
+ ------------------------
- ------------------------
- Currently the only stable way to create a `Box` is via the `Box::new` method.
- Also it is not possible in stable Rust to destructure a `Box` in a match
- pattern. The unstable `box` keyword can be used to create a `Box`. An example
- usage would be:
+ These are imported as if they were FFI functions, with the special
+ `rust-intrinsic` ABI. For example, if one was in a freestanding
+ context, but wished to be able to `transmute` between types, and
+ perform efficient pointer arithmetic, one would import those functions
+ via a declaration like
- ```rust
- #![feature(box_syntax)]
+ ```rust
+ #![feature(intrinsics)]
+ #![allow(internal_features)]
+ # fn main() {}
- fn main() {
- let b = box 5;
- }
- ```
+ extern "rust-intrinsic" {
+ fn transmute<T, U>(x: T) -> U;
- "##]],
+ fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
+ }
+ ```
+
+ As with any other FFI functions, these are always `unsafe` to call.
+
+ "#]],
)
}
@@ -5927,7 +5946,7 @@ pub struct Foo(i32);
```
```rust
- pub struct Foo // size = 4, align = 4
+ pub struct Foo(i32); // size = 4, align = 4
```
---
@@ -6594,3 +6613,115 @@ fn test() {
"#]],
);
}
+
+#[test]
+fn format_args_implicit() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+let aaaaa = "foo";
+format_args!("{aaaaa$0}");
+}
+"#,
+ expect![[r#"
+ *aaaaa*
+
+ ```rust
+ let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn format_args_implicit2() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+let aaaaa = "foo";
+format_args!("{$0aaaaa}");
+}
+"#,
+ expect![[r#"
+ *aaaaa*
+
+ ```rust
+ let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn format_args_implicit_raw() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+let aaaaa = "foo";
+format_args!(r"{$0aaaaa}");
+}
+"#,
+ expect![[r#"
+ *aaaaa*
+
+ ```rust
+ let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn format_args_implicit_nested() {
+ check(
+ r#"
+//- minicore: fmt
+macro_rules! foo {
+ ($($tt:tt)*) => {
+ format_args!($($tt)*)
+ }
+}
+fn test() {
+let aaaaa = "foo";
+foo!(r"{$0aaaaa}");
+}
+"#,
+ expect![[r#"
+ *aaaaa*
+
+ ```rust
+ let aaaaa: &str // size = 16 (0x10), align = 8, niches = 1
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn method_call_without_parens() {
+ check(
+ r#"
+struct S;
+impl S {
+ fn foo<T>(&self, t: T) {}
+}
+
+fn main() {
+ S.foo$0;
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ fn foo<T>(&self, t: T)
+ ```
+ "#]],
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
index a5d070fe7..e82d730e4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -31,6 +31,7 @@ mod discriminant;
mod fn_lifetime_fn;
mod implicit_static;
mod param_name;
+mod implicit_drop;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct InlayHintsConfig {
@@ -45,6 +46,7 @@ pub struct InlayHintsConfig {
pub closure_return_type_hints: ClosureReturnTypeHints,
pub closure_capture_hints: bool,
pub binding_mode_hints: bool,
+ pub implicit_drop_hints: bool,
pub lifetime_elision_hints: LifetimeElisionHints,
pub param_names_for_lifetime_elision_hints: bool,
pub hide_named_constructor_hints: bool,
@@ -124,6 +126,7 @@ pub enum InlayKind {
Lifetime,
Parameter,
Type,
+ Drop,
}
#[derive(Debug)]
@@ -312,6 +315,7 @@ impl HirWrite for InlayHintLabelBuilder<'_> {
}
self.make_new_part();
let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return };
+ let location = location.call_site();
let location =
FileRange { file_id: location.file_id, range: location.focus_or_full_range() };
self.location = Some(location);
@@ -418,6 +422,11 @@ fn ty_to_text_edit(
Some(builder.finish())
}
+pub enum RangeLimit {
+ Fixed(TextRange),
+ NearestParent(TextSize),
+}
+
// Feature: Inlay Hints
//
// rust-analyzer shows additional information inline with the source code.
@@ -439,7 +448,7 @@ fn ty_to_text_edit(
pub(crate) fn inlay_hints(
db: &RootDatabase,
file_id: FileId,
- range_limit: Option<TextRange>,
+ range_limit: Option<RangeLimit>,
config: &InlayHintsConfig,
) -> Vec<InlayHint> {
let _p = profile::span("inlay_hints");
@@ -454,13 +463,31 @@ pub(crate) fn inlay_hints(
let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
match range_limit {
- Some(range) => match file.covering_element(range) {
+ Some(RangeLimit::Fixed(range)) => match file.covering_element(range) {
NodeOrToken::Token(_) => return acc,
NodeOrToken::Node(n) => n
.descendants()
.filter(|descendant| range.intersect(descendant.text_range()).is_some())
.for_each(hints),
},
+ Some(RangeLimit::NearestParent(position)) => {
+ match file.token_at_offset(position).left_biased() {
+ Some(token) => {
+ if let Some(parent_block) =
+ token.parent_ancestors().find_map(ast::BlockExpr::cast)
+ {
+ parent_block.syntax().descendants().for_each(hints)
+ } else if let Some(parent_item) =
+ token.parent_ancestors().find_map(ast::Item::cast)
+ {
+ parent_item.syntax().descendants().for_each(hints)
+ } else {
+ return acc;
+ }
+ }
+ None => return acc,
+ }
+ }
None => file.descendants().for_each(hints),
};
}
@@ -503,7 +530,10 @@ fn hints(
ast::Item(it) => match it {
// FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints
ast::Item::Impl(_) => None,
- ast::Item::Fn(it) => fn_lifetime_fn::hints(hints, config, it),
+ ast::Item::Fn(it) => {
+ implicit_drop::hints(hints, sema, config, &it);
+ fn_lifetime_fn::hints(hints, config, it)
+ },
// static type elisions
ast::Item::Static(it) => implicit_static::hints(hints, config, Either::Left(it)),
ast::Item::Const(it) => implicit_static::hints(hints, config, Either::Right(it)),
@@ -563,6 +593,7 @@ mod tests {
use hir::ClosureStyle;
use itertools::Itertools;
use test_utils::extract_annotations;
+ use text_edit::{TextRange, TextSize};
use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode};
use crate::DiscriminantHints;
@@ -590,6 +621,7 @@ mod tests {
max_length: None,
closing_brace_hints_min_lines: None,
fields_to_resolve: InlayFieldsToResolve::empty(),
+ implicit_drop_hints: false,
};
pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
type_hints: true,
@@ -629,6 +661,22 @@ mod tests {
expect.assert_debug_eq(&inlay_hints)
}
+ #[track_caller]
+ pub(super) fn check_expect_clear_loc(
+ config: InlayHintsConfig,
+ ra_fixture: &str,
+ expect: Expect,
+ ) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let mut inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ inlay_hints.iter_mut().flat_map(|hint| &mut hint.label.parts).for_each(|hint| {
+ if let Some(loc) = &mut hint.linked_location {
+ loc.range = TextRange::empty(TextSize::from(0));
+ }
+ });
+ expect.assert_debug_eq(&inlay_hints)
+ }
+
/// Computes inlay hints for the fixture, applies all the provided text edits and then runs
/// expect test.
#[track_caller]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
index 680035c72..45b51e355 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
@@ -177,7 +177,11 @@ mod tests {
use syntax::{TextRange, TextSize};
use test_utils::extract_annotations;
- use crate::{fixture, inlay_hints::InlayHintsConfig, ClosureReturnTypeHints};
+ use crate::{
+ fixture,
+ inlay_hints::{InlayHintsConfig, RangeLimit},
+ ClosureReturnTypeHints,
+ };
use crate::inlay_hints::tests::{
check, check_edit, check_no_edit, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
@@ -400,7 +404,7 @@ fn main() {
.inlay_hints(
&InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
file_id,
- Some(TextRange::new(TextSize::from(500), TextSize::from(600))),
+ Some(RangeLimit::Fixed(TextRange::new(TextSize::from(500), TextSize::from(600)))),
)
.unwrap();
let actual =
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
index 12e46c0f8..c9e9a2237 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
@@ -78,7 +78,9 @@ mod tests {
use expect_test::expect;
use crate::{
- inlay_hints::tests::{check_expect, check_with_config, DISABLED_CONFIG, TEST_CONFIG},
+ inlay_hints::tests::{
+ check_expect, check_expect_clear_loc, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
+ },
InlayHintsConfig,
};
@@ -444,7 +446,7 @@ fn main() {
#[test]
fn shorten_iterator_chaining_hints() {
- check_expect(
+ check_expect_clear_loc(
InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
r#"
//- minicore: iterators
@@ -484,7 +486,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10739..10747,
+ range: 0..0,
},
),
tooltip: "",
@@ -497,7 +499,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10771..10775,
+ range: 0..0,
},
),
tooltip: "",
@@ -522,7 +524,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10739..10747,
+ range: 0..0,
},
),
tooltip: "",
@@ -535,7 +537,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10771..10775,
+ range: 0..0,
},
),
tooltip: "",
@@ -560,7 +562,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10739..10747,
+ range: 0..0,
},
),
tooltip: "",
@@ -573,7 +575,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 10771..10775,
+ range: 0..0,
},
),
tooltip: "",
@@ -598,7 +600,7 @@ fn main() {
file_id: FileId(
0,
),
- range: 24..30,
+ range: 0..0,
},
),
tooltip: "",
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
index d691303c1..2f8b95951 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_captures.rs
@@ -2,6 +2,7 @@
//!
//! Tests live in [`bind_pat`][super::bind_pat] module.
use ide_db::{base_db::FileId, famous_defs::FamousDefs};
+use stdx::TupleExt;
use syntax::ast::{self, AstNode};
use text_edit::{TextRange, TextSize};
@@ -73,7 +74,9 @@ pub(super) fn hints(
capture.display_place(sema.db)
),
None,
- source.name().and_then(|name| name.syntax().original_file_range_opt(sema.db)),
+ source.name().and_then(|name| {
+ name.syntax().original_file_range_opt(sema.db).map(TupleExt::head)
+ }),
);
acc.push(InlayHint {
needs_resolve: label.needs_resolve(),
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs
new file mode 100644
index 000000000..9cbaed090
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -0,0 +1,218 @@
+//! Implementation of "implicit drop" inlay hints:
+//! ```no_run
+//! fn main() {
+//! let x = vec![2];
+//! if some_condition() {
+//! /* drop(x) */return;
+//! }
+//! }
+//! ```
+use hir::{
+ db::{DefDatabase as _, HirDatabase as _},
+ mir::{MirSpan, TerminatorKind},
+ ChalkTyInterner, DefWithBody, Semantics,
+};
+use ide_db::{base_db::FileRange, RootDatabase};
+
+use syntax::{
+ ast::{self, AstNode},
+ match_ast,
+};
+
+use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
+
+pub(super) fn hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ def: &ast::Fn,
+) -> Option<()> {
+ if !config.implicit_drop_hints {
+ return None;
+ }
+
+ let def = sema.to_def(def)?;
+ let def: DefWithBody = def.into();
+
+ let source_map = sema.db.body_with_source_map(def.into()).1;
+
+ let hir = sema.db.body(def.into());
+ let mir = sema.db.mir_body(def.into()).ok()?;
+
+ let local_to_binding = mir.local_to_binding_map();
+
+ for (_, bb) in mir.basic_blocks.iter() {
+ let terminator = bb.terminator.as_ref()?;
+ if let TerminatorKind::Drop { place, .. } = terminator.kind {
+ if !place.projection.is_empty() {
+ continue; // Ignore complex cases for now
+ }
+ if mir.locals[place.local].ty.adt_id(ChalkTyInterner).is_none() {
+ continue; // Arguably only ADTs have significant drop impls
+ }
+ let Some(binding) = local_to_binding.get(place.local) else {
+ continue; // Ignore temporary values
+ };
+ let range = match terminator.span {
+ MirSpan::ExprId(e) => match source_map.expr_syntax(e) {
+ Ok(s) => {
+ let root = &s.file_syntax(sema.db);
+ let expr = s.value.to_node(root);
+ let expr = expr.syntax();
+ match_ast! {
+ match expr {
+ ast::BlockExpr(x) => x.stmt_list().and_then(|x| x.r_curly_token()).map(|x| x.text_range()).unwrap_or_else(|| expr.text_range()),
+                            // make the inlay hint appear after the semicolon if there is one
+ _ => {
+ let nearest_semicolon = nearest_token_after_node(expr, syntax::SyntaxKind::SEMICOLON);
+ nearest_semicolon.map(|x| x.text_range()).unwrap_or_else(|| expr.text_range())
+ },
+ }
+ }
+ }
+ Err(_) => continue,
+ },
+ MirSpan::PatId(p) => match source_map.pat_syntax(p) {
+ Ok(s) => s.value.text_range(),
+ Err(_) => continue,
+ },
+ MirSpan::Unknown => continue,
+ };
+ let binding = &hir.bindings[*binding];
+ let binding_source = binding
+ .definitions
+ .first()
+ .and_then(|d| source_map.pat_syntax(*d).ok())
+ .and_then(|d| {
+ Some(FileRange { file_id: d.file_id.file_id()?, range: d.value.text_range() })
+ });
+ let name = binding.name.to_smol_str();
+ if name.starts_with("<ra@") {
+ continue; // Ignore desugared variables
+ }
+ let mut label = InlayHintLabel::simple(
+ name,
+ Some(crate::InlayTooltip::String("moz".into())),
+ binding_source,
+ );
+ label.prepend_str("drop(");
+ label.append_str(")");
+ acc.push(InlayHint {
+ range,
+ position: InlayHintPosition::After,
+ pad_left: true,
+ pad_right: true,
+ kind: InlayKind::Drop,
+ needs_resolve: label.needs_resolve(),
+ label,
+ text_edit: None,
+ })
+ }
+ }
+
+ Some(())
+}
+
+fn nearest_token_after_node(
+ node: &syntax::SyntaxNode,
+ token_type: syntax::SyntaxKind,
+) -> Option<syntax::SyntaxToken> {
+ node.siblings_with_tokens(syntax::Direction::Next)
+ .filter_map(|it| it.as_token().map(|it| it.clone()))
+ .filter(|it| it.kind() == token_type)
+ .next()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
+ InlayHintsConfig,
+ };
+
+ const ONLY_DROP_CONFIG: InlayHintsConfig =
+ InlayHintsConfig { implicit_drop_hints: true, ..DISABLED_CONFIG };
+
+ #[test]
+ fn basic() {
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+ struct X;
+ fn f() {
+ let x = X;
+ if 2 == 5 {
+ return;
+ //^ drop(x)
+ }
+ }
+ //^ drop(x)
+"#,
+ );
+ }
+
+ #[test]
+ fn no_hint_for_copy_types_and_mutable_references() {
+ // `T: Copy` and `T = &mut U` types do nothing on drop, so we should hide drop inlay hint for them.
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+//- minicore: copy, derive
+
+ struct X(i32, i32);
+ #[derive(Clone, Copy)]
+ struct Y(i32, i32);
+ fn f() {
+ let a = 2;
+ let b = a + 4;
+ let mut x = X(a, b);
+ let mut y = Y(a, b);
+ let mx = &mut x;
+ let my = &mut y;
+ let c = a + b;
+ }
+ //^ drop(x)
+"#,
+ );
+ }
+
+ #[test]
+ fn try_operator() {
+        // We currently show drop inlay hint for every `?` operator that may potentially drop something. We probably need to
+ // make it configurable as it doesn't seem very useful.
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+//- minicore: copy, try, option
+
+ struct X;
+ fn f() -> Option<()> {
+ let x = X;
+ let t_opt = Some(2);
+ let t = t_opt?;
+ //^ drop(x)
+ Some(())
+ }
+ //^ drop(x)
+"#,
+ );
+ }
+
+ #[test]
+ fn if_let() {
+ check_with_config(
+ ONLY_DROP_CONFIG,
+ r#"
+ struct X;
+ fn f() {
+ let x = X;
+ if let X = x {
+ let y = X;
+ }
+ //^ drop(y)
+ }
+ //^ drop(x)
+"#,
+ );
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
index d06ffd535..216974904 100644
--- a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
@@ -1,10 +1,10 @@
use hir::Semantics;
-use ide_db::base_db::SourceDatabaseExt;
-use ide_db::RootDatabase;
-use ide_db::{base_db::FilePosition, LineIndexDatabase};
+use ide_db::{
+ base_db::{FilePosition, SourceDatabaseExt},
+ LineIndexDatabase, RootDatabase,
+};
use std::{fmt::Write, time::Instant};
-use syntax::TextRange;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
// Feature: Interpret Function
//
@@ -28,7 +28,9 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<Strin
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
- let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+ let item = ancestors_at_offset(source_file.syntax(), position.offset)
+ .filter(|it| !ast::MacroCall::can_cast(it.kind()))
+ .find_map(ast::Item::cast)?;
let def = match item {
ast::Item::Fn(it) => sema.to_def(&it)?,
_ => return None,
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index aee03d218..a19952e4c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -8,8 +8,9 @@
//! in this crate.
// For proving that RootDatabase is RefUnwindSafe.
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![recursion_limit = "128"]
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#[allow(unused)]
macro_rules! eprintln {
@@ -93,13 +94,13 @@ pub use crate::{
inlay_hints::{
AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints,
InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintPosition,
- InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints,
+ InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, RangeLimit,
},
join_lines::JoinLinesConfig,
markup::Markup,
moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation},
move_item::Direction,
- navigation_target::NavigationTarget,
+ navigation_target::{NavigationTarget, UpmappingResult},
prime_caches::ParallelPrimeCachesProgress,
references::ReferenceSearchResult,
rename::RenameError,
@@ -132,7 +133,9 @@ pub use ide_db::{
symbol_index::Query,
RootDatabase, SymbolKind,
};
-pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity};
+pub use ide_diagnostics::{
+ Diagnostic, DiagnosticCode, DiagnosticsConfig, ExprFillDefaultMode, Severity,
+};
pub use ide_ssr::SsrError;
pub use syntax::{TextRange, TextSize};
pub use text_edit::{Indel, TextEdit};
@@ -229,7 +232,7 @@ impl Analysis {
// `AnalysisHost` for creating a fully-featured analysis.
pub fn from_single_file(text: String) -> (Analysis, FileId) {
let mut host = AnalysisHost::default();
- let file_id = FileId(0);
+ let file_id = FileId::from_raw(0);
let mut file_set = FileSet::default();
file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));
let source_root = SourceRoot::new_local(file_set);
@@ -396,7 +399,7 @@ impl Analysis {
&self,
config: &InlayHintsConfig,
file_id: FileId,
- range: Option<TextRange>,
+ range: Option<RangeLimit>,
) -> Cancellable<Vec<InlayHint>> {
self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
}
@@ -412,6 +415,7 @@ impl Analysis {
symbol_index::world_symbols(db, query)
.into_iter() // xx: should we make this a par iter?
.filter_map(|s| s.try_to_nav(db))
+ .map(UpmappingResult::call_site)
.collect::<Vec<_>>()
})
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
index 2ca2b5b1d..8e8bb5e01 100644
--- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -1,7 +1,7 @@
//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
//! for LSIF and LSP.
-use hir::{AsAssocItem, AssocItemContainer, Crate, Semantics};
+use hir::{AsAssocItem, AssocItemContainer, Crate, DescendPreference, Semantics};
use ide_db::{
base_db::{CrateOrigin, FilePosition, LangCrateOrigin},
defs::{Definition, IdentClass},
@@ -99,7 +99,7 @@ pub(crate) fn moniker(
});
}
let navs = sema
- .descend_into_macros(original_token.clone(), offset)
+ .descend_into_macros(DescendPreference::None, original_token.clone())
.into_iter()
.filter_map(|token| {
IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
index 32f211c6b..6cb7d7724 100644
--- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -2,10 +2,11 @@
use std::fmt;
+use arrayvec::ArrayVec;
use either::Either;
use hir::{
- symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, HirDisplay, HirFileId,
- InFile, LocalSource, ModuleSource,
+ db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource,
+ HirDisplay, HirFileId, InFile, LocalSource, ModuleSource,
};
use ide_db::{
base_db::{FileId, FileRange},
@@ -40,6 +41,8 @@ pub struct NavigationTarget {
/// comments, and `focus_range` is the range of the identifier.
///
/// Clients should place the cursor on this range when navigating to this target.
+ ///
+ /// This range must be contained within [`Self::full_range`].
pub focus_range: Option<TextRange>,
pub name: SmolStr,
pub kind: Option<SymbolKind>,
@@ -70,15 +73,15 @@ impl fmt::Debug for NavigationTarget {
}
pub(crate) trait ToNav {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget;
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget>;
}
pub(crate) trait TryToNav {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget>;
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>>;
}
impl<T: TryToNav, U: TryToNav> TryToNav for Either<T, U> {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
Either::Left(it) => it.try_to_nav(db),
Either::Right(it) => it.try_to_nav(db),
@@ -91,23 +94,30 @@ impl NavigationTarget {
self.focus_range.unwrap_or(self.full_range)
}
- pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
+ pub(crate) fn from_module_to_decl(
+ db: &RootDatabase,
+ module: hir::Module,
+ ) -> UpmappingResult<NavigationTarget> {
let name = module.name(db).map(|it| it.to_smol_str()).unwrap_or_default();
- if let Some(InFile { value, file_id }) = &module.declaration_source(db) {
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, *file_id, value.syntax(), value.name());
- let mut res = NavigationTarget::from_syntax(
- file_id,
- name,
- focus_range,
- full_range,
- SymbolKind::Module,
- );
- res.docs = module.docs(db);
- res.description = Some(module.display(db).to_string());
- return res;
+ match module.declaration_source(db) {
+ Some(InFile { value, file_id }) => {
+ orig_range_with_focus(db, file_id, value.syntax(), value.name()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ let mut res = NavigationTarget::from_syntax(
+ file_id,
+ name.clone(),
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ );
+ res.docs = module.docs(db);
+ res.description = Some(module.display(db).to_string());
+ res
+ },
+ )
+ }
+ _ => module.to_nav(db),
}
- module.to_nav(db)
}
#[cfg(test)]
@@ -133,13 +143,14 @@ impl NavigationTarget {
db: &RootDatabase,
InFile { file_id, value }: InFile<&dyn ast::HasName>,
kind: SymbolKind,
- ) -> NavigationTarget {
- let name = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
+ ) -> UpmappingResult<NavigationTarget> {
+ let name: SmolStr = value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, file_id, value.syntax(), value.name());
-
- NavigationTarget::from_syntax(file_id, name, focus_range, full_range, kind)
+ orig_range_with_focus(db, file_id, value.syntax(), value.name()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(file_id, name.clone(), focus_range, full_range, kind)
+ },
+ )
}
fn from_syntax(
@@ -164,48 +175,51 @@ impl NavigationTarget {
}
impl TryToNav for FileSymbol {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
- let full_range = self.loc.original_range(db);
- let focus_range = self.loc.original_name_range(db).and_then(|it| {
- if it.file_id == full_range.file_id {
- Some(it.range)
- } else {
- None
- }
- });
-
- Some(NavigationTarget {
- file_id: full_range.file_id,
- name: self
- .is_alias
- .then(|| self.def.name(db))
- .flatten()
- .map_or_else(|| self.name.clone(), |it| it.to_smol_str()),
- alias: self.is_alias.then(|| self.name.clone()),
- kind: Some(hir::ModuleDefId::from(self.def).into()),
- full_range: full_range.range,
- focus_range,
- container_name: self.container_name.clone(),
- description: match self.def {
- hir::ModuleDef::Module(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Function(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Adt(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Const(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Static(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()),
- hir::ModuleDef::BuiltinType(_) => None,
- },
- docs: None,
- })
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
+ let root = db.parse_or_expand(self.loc.hir_file_id);
+ self.loc.ptr.to_node(&root);
+ Some(
+ orig_range_with_focus(
+ db,
+ self.loc.hir_file_id,
+ &self.loc.ptr.to_node(&root),
+ Some(self.loc.name_ptr.to_node(&root)),
+ )
+ .map(|(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget {
+ file_id,
+ name: self
+ .is_alias
+ .then(|| self.def.name(db))
+ .flatten()
+ .map_or_else(|| self.name.clone(), |it| it.to_smol_str()),
+ alias: self.is_alias.then(|| self.name.clone()),
+ kind: Some(hir::ModuleDefId::from(self.def).into()),
+ full_range,
+ focus_range,
+ container_name: self.container_name.clone(),
+ description: match self.def {
+ hir::ModuleDef::Module(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Function(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Adt(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Const(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Static(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()),
+ hir::ModuleDef::BuiltinType(_) => None,
+ },
+ docs: None,
+ }
+ }),
+ )
}
}
impl TryToNav for Definition {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
Definition::Local(it) => Some(it.to_nav(db)),
Definition::Label(it) => Some(it.to_nav(db)),
@@ -233,7 +247,7 @@ impl TryToNav for Definition {
}
impl TryToNav for hir::ModuleDef {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
hir::ModuleDef::Module(it) => Some(it.to_nav(db)),
hir::ModuleDef::Function(it) => it.try_to_nav(db),
@@ -331,22 +345,26 @@ where
D: HasSource + ToNavFromAst + Copy + HasDocs + HirDisplay,
D::Ast: ast::HasName,
{
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let src = self.source(db)?;
- let mut res = NavigationTarget::from_named(
- db,
- src.as_ref().map(|it| it as &dyn ast::HasName),
- D::KIND,
- );
- res.docs = self.docs(db);
- res.description = Some(self.display(db).to_string());
- res.container_name = self.container_name(db);
- Some(res)
+ Some(
+ NavigationTarget::from_named(
+ db,
+ src.as_ref().map(|it| it as &dyn ast::HasName),
+ D::KIND,
+ )
+ .map(|mut res| {
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res.container_name = self.container_name(db);
+ res
+ }),
+ )
}
}
impl ToNav for hir::Module {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
let InFile { file_id, value } = self.definition_source(db);
let name = self.name(db).map(|it| it.to_smol_str()).unwrap_or_default();
@@ -355,97 +373,125 @@ impl ToNav for hir::Module {
ModuleSource::Module(node) => (node.syntax(), node.name()),
ModuleSource::BlockExpr(node) => (node.syntax(), None),
};
- let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
- NavigationTarget::from_syntax(file_id, name, focus_range, full_range, SymbolKind::Module)
+
+ orig_range_with_focus(db, file_id, syntax, focus).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(
+ file_id,
+ name.clone(),
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ )
+ },
+ )
}
}
impl TryToNav for hir::Impl {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let InFile { file_id, value } = self.source(db)?;
- let derive_attr = self.as_builtin_derive(db);
+ let derive_path = self.as_builtin_derive_path(db);
- let (focus, syntax) = match &derive_attr {
- Some(attr) => (None, attr.value.syntax()),
- None => (value.self_ty(), value.syntax()),
+ let (file_id, focus, syntax) = match &derive_path {
+ Some(attr) => (attr.file_id.into(), None, attr.value.syntax()),
+ None => (file_id, value.self_ty(), value.syntax()),
};
- let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
- Some(NavigationTarget::from_syntax(
- file_id,
- "impl".into(),
- focus_range,
- full_range,
- SymbolKind::Impl,
+ Some(orig_range_with_focus(db, file_id, syntax, focus).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(
+ file_id,
+ "impl".into(),
+ focus_range,
+ full_range,
+ SymbolKind::Impl,
+ )
+ },
))
}
}
impl TryToNav for hir::ExternCrateDecl {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let src = self.source(db)?;
let InFile { file_id, value } = src;
let focus = value
.rename()
.map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right));
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, file_id, value.syntax(), focus);
- let mut res = NavigationTarget::from_syntax(
- file_id,
- self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(),
- focus_range,
- full_range,
- SymbolKind::Module,
- );
- res.docs = self.docs(db);
- res.description = Some(self.display(db).to_string());
- res.container_name = container_name(db, *self);
- Some(res)
+ Some(orig_range_with_focus(db, file_id, value.syntax(), focus).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ let mut res = NavigationTarget::from_syntax(
+ file_id,
+ self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(),
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ );
+
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res.container_name = container_name(db, *self);
+ res
+ },
+ ))
}
}
impl TryToNav for hir::Field {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let src = self.source(db)?;
let field_source = match &src.value {
FieldSource::Named(it) => {
- let mut res =
- NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field);
- res.docs = self.docs(db);
- res.description = Some(self.display(db).to_string());
- res
- }
- FieldSource::Pos(it) => {
- let FileRange { file_id, range } =
- src.with_value(it.syntax()).original_file_range(db);
- NavigationTarget::from_syntax(file_id, "".into(), None, range, SymbolKind::Field)
+ NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map(
+ |mut res| {
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res
+ },
+ )
}
+ FieldSource::Pos(it) => orig_range(db, src.file_id, it.syntax()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ NavigationTarget::from_syntax(
+ file_id,
+ format!("{}", self.index()).into(),
+ focus_range,
+ full_range,
+ SymbolKind::Field,
+ )
+ },
+ ),
};
Some(field_source)
}
}
impl TryToNav for hir::Macro {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let src = self.source(db)?;
let name_owner: &dyn ast::HasName = match &src.value {
Either::Left(it) => it,
Either::Right(it) => it,
};
- let mut res = NavigationTarget::from_named(
- db,
- src.as_ref().with_value(name_owner),
- self.kind(db).into(),
- );
- res.docs = self.docs(db);
- Some(res)
+ Some(
+ NavigationTarget::from_named(
+ db,
+ src.as_ref().with_value(name_owner),
+ self.kind(db).into(),
+ )
+ .map(|mut res| {
+ res.docs = self.docs(db);
+ res
+ }),
+ )
}
}
impl TryToNav for hir::Adt {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
hir::Adt::Struct(it) => it.try_to_nav(db),
hir::Adt::Union(it) => it.try_to_nav(db),
@@ -455,7 +501,7 @@ impl TryToNav for hir::Adt {
}
impl TryToNav for hir::AssocItem {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
AssocItem::Function(it) => it.try_to_nav(db),
AssocItem::Const(it) => it.try_to_nav(db),
@@ -465,7 +511,7 @@ impl TryToNav for hir::AssocItem {
}
impl TryToNav for hir::GenericParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
match self {
hir::GenericParam::TypeParam(it) => it.try_to_nav(db),
hir::GenericParam::ConstParam(it) => it.try_to_nav(db),
@@ -475,7 +521,7 @@ impl TryToNav for hir::GenericParam {
}
impl ToNav for LocalSource {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
let InFile { file_id, value } = &self.source;
let file_id = *file_id;
let local = self.local;
@@ -484,60 +530,61 @@ impl ToNav for LocalSource {
Either::Right(it) => (it.syntax(), it.name()),
};
- let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, node, name);
-
- let name = local.name(db).to_smol_str();
- let kind = if local.is_self(db) {
- SymbolKind::SelfParam
- } else if local.is_param(db) {
- SymbolKind::ValueParam
- } else {
- SymbolKind::Local
- };
- NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(kind),
- full_range,
- focus_range,
- container_name: None,
- description: None,
- docs: None,
- }
+ orig_range_with_focus(db, file_id, node, name).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| {
+ let name = local.name(db).to_smol_str();
+ let kind = if local.is_self(db) {
+ SymbolKind::SelfParam
+ } else if local.is_param(db) {
+ SymbolKind::ValueParam
+ } else {
+ SymbolKind::Local
+ };
+ NavigationTarget {
+ file_id,
+ name,
+ alias: None,
+ kind: Some(kind),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ }
+ },
+ )
}
}
impl ToNav for hir::Local {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
self.primary_source(db).to_nav(db)
}
}
impl ToNav for hir::Label {
- fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
+ fn to_nav(&self, db: &RootDatabase) -> UpmappingResult<NavigationTarget> {
let InFile { file_id, value } = self.source(db);
let name = self.name(db).to_smol_str();
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, file_id, value.syntax(), value.lifetime());
-
- NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(SymbolKind::Label),
- full_range,
- focus_range,
- container_name: None,
- description: None,
- docs: None,
- }
+ orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+ file_id,
+ name: name.clone(),
+ alias: None,
+ kind: Some(SymbolKind::Label),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ },
+ )
}
}
impl TryToNav for hir::TypeParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let InFile { file_id, value } = self.merge().source(db)?;
let name = self.name(db).to_smol_str();
@@ -556,51 +603,51 @@ impl TryToNav for hir::TypeParam {
};
let focus = value.as_ref().either(|it| it.name(), |it| it.name());
- let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
-
- Some(NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(SymbolKind::TypeParam),
- full_range,
- focus_range,
- container_name: None,
- description: None,
- docs: None,
- })
+ Some(orig_range_with_focus(db, file_id, syntax, focus).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+ file_id,
+ name: name.clone(),
+ alias: None,
+ kind: Some(SymbolKind::TypeParam),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ },
+ ))
}
}
impl TryToNav for hir::TypeOrConstParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
self.split(db).try_to_nav(db)
}
}
impl TryToNav for hir::LifetimeParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let InFile { file_id, value } = self.source(db)?;
let name = self.name(db).to_smol_str();
- let FileRange { file_id, range } =
- InFile::new(file_id, value.syntax()).original_file_range(db);
- Some(NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(SymbolKind::LifetimeParam),
- full_range: range,
- focus_range: Some(range),
- container_name: None,
- description: None,
- docs: None,
- })
+ Some(orig_range(db, file_id, value.syntax()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+ file_id,
+ name: name.clone(),
+ alias: None,
+ kind: Some(SymbolKind::LifetimeParam),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ },
+ ))
}
}
impl TryToNav for hir::ConstParam {
- fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<UpmappingResult<NavigationTarget>> {
let InFile { file_id, value } = self.merge().source(db)?;
let name = self.name(db).to_smol_str();
@@ -612,35 +659,178 @@ impl TryToNav for hir::ConstParam {
}
};
- let (file_id, full_range, focus_range) =
- orig_range_with_focus(db, file_id, value.syntax(), value.name());
- Some(NavigationTarget {
- file_id,
- name,
- alias: None,
- kind: Some(SymbolKind::ConstParam),
- full_range,
- focus_range,
- container_name: None,
- description: None,
- docs: None,
- })
+ Some(orig_range_with_focus(db, file_id, value.syntax(), value.name()).map(
+ |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget {
+ file_id,
+ name: name.clone(),
+ alias: None,
+ kind: Some(SymbolKind::ConstParam),
+ full_range,
+ focus_range,
+ container_name: None,
+ description: None,
+ docs: None,
+ },
+ ))
+ }
+}
+
+#[derive(Debug)]
+pub struct UpmappingResult<T> {
+ /// The macro call site.
+ pub call_site: T,
+ /// The macro definition site, if relevant.
+ pub def_site: Option<T>,
+}
+
+impl<T> UpmappingResult<T> {
+ pub fn call_site(self) -> T {
+ self.call_site
+ }
+
+ pub fn collect<FI: FromIterator<T>>(self) -> FI {
+ FI::from_iter(self.into_iter())
+ }
+}
+
+impl<T> IntoIterator for UpmappingResult<T> {
+ type Item = T;
+
+ type IntoIter = <ArrayVec<T, 2> as IntoIterator>::IntoIter;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.def_site
+ .into_iter()
+ .chain(Some(self.call_site))
+ .collect::<ArrayVec<_, 2>>()
+ .into_iter()
}
}
+impl<T> UpmappingResult<T> {
+ fn map<U>(self, f: impl Fn(T) -> U) -> UpmappingResult<U> {
+ UpmappingResult { call_site: f(self.call_site), def_site: self.def_site.map(f) }
+ }
+}
+
+/// Returns the original range of the syntax node, and the range of the name mapped out of macro expansions.
+/// May return two results if the mapped node originates from a macro definition in which case the
+/// second result is the creating macro call.
fn orig_range_with_focus(
db: &RootDatabase,
hir_file: HirFileId,
value: &SyntaxNode,
name: Option<impl AstNode>,
-) -> (FileId, TextRange, Option<TextRange>) {
- let FileRange { file_id, range: full_range } =
- InFile::new(hir_file, value).original_file_range(db);
- let focus_range = name
- .and_then(|it| InFile::new(hir_file, it.syntax()).original_file_range_opt(db))
- .and_then(|range| if range.file_id == file_id { Some(range.range) } else { None });
-
- (file_id, full_range, focus_range)
+) -> UpmappingResult<(FileRange, Option<TextRange>)> {
+ let Some(name) = name else { return orig_range(db, hir_file, value) };
+
+ let call_range = || {
+ db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+ .kind
+ .original_call_range(db)
+ };
+
+ let def_range = || {
+ db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
+ .def
+ .definition_range(db)
+ };
+
+ let value_range = InFile::new(hir_file, value).original_file_range_opt(db);
+ let ((call_site_range, call_site_focus), def_site) =
+ match InFile::new(hir_file, name.syntax()).original_file_range_opt(db) {
+ // call site name
+ Some((focus_range, ctxt)) if ctxt.is_root() => {
+ // Try to upmap the node as well, if it ends up in the def site, go back to the call site
+ (
+ (
+ match value_range {
+ // name is in the node in the macro input so we can return it
+ Some((range, ctxt))
+ if ctxt.is_root()
+ && range.file_id == focus_range.file_id
+ && range.range.contains_range(focus_range.range) =>
+ {
+ range
+ }
+ // name lies outside the node, so instead point to the macro call which
+ // *should* contain the name
+ _ => call_range(),
+ },
+ Some(focus_range),
+ ),
+ // no def site relevant
+ None,
+ )
+ }
+
+ // def site name
+            // FIXME: This can be improved
+ Some((focus_range, _ctxt)) => {
+ match value_range {
+ // but overall node is in macro input
+ Some((range, ctxt)) if ctxt.is_root() => (
+ // node mapped up in call site, show the node
+ (range, None),
+ // def site, if the name is in the (possibly) upmapped def site range, show the
+ // def site
+ {
+ let (def_site, _) = def_range().original_node_file_range(db);
+ (def_site.file_id == focus_range.file_id
+ && def_site.range.contains_range(focus_range.range))
+ .then_some((def_site, Some(focus_range)))
+ },
+ ),
+ // node is in macro def, just show the focus
+ _ => (
+ // show the macro call
+ (call_range(), None),
+ Some((focus_range, Some(focus_range))),
+ ),
+ }
+ }
+ // lost name? can't happen for single tokens
+ None => return orig_range(db, hir_file, value),
+ };
+
+ UpmappingResult {
+ call_site: (
+ call_site_range,
+ call_site_focus.and_then(|FileRange { file_id, range }| {
+ if call_site_range.file_id == file_id && call_site_range.range.contains_range(range)
+ {
+ Some(range)
+ } else {
+ None
+ }
+ }),
+ ),
+ def_site: def_site.map(|(def_site_range, def_site_focus)| {
+ (
+ def_site_range,
+ def_site_focus.and_then(|FileRange { file_id, range }| {
+ if def_site_range.file_id == file_id
+ && def_site_range.range.contains_range(range)
+ {
+ Some(range)
+ } else {
+ None
+ }
+ }),
+ )
+ }),
+ }
+}
+
+fn orig_range(
+ db: &RootDatabase,
+ hir_file: HirFileId,
+ value: &SyntaxNode,
+) -> UpmappingResult<(FileRange, Option<TextRange>)> {
+ UpmappingResult {
+ call_site: (InFile::new(hir_file, value).original_file_range(db), None),
+ def_site: None,
+ }
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
index 506f9452c..413dbf9c5 100644
--- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
@@ -45,11 +45,11 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
Some(module) => sema
.to_def(&module)
.into_iter()
- .map(|module| NavigationTarget::from_module_to_decl(db, module))
+ .flat_map(|module| NavigationTarget::from_module_to_decl(db, module))
.collect(),
None => sema
.to_module_defs(position.file_id)
- .map(|module| NavigationTarget::from_module_to_decl(db, module))
+ .flat_map(|module| NavigationTarget::from_module_to_decl(db, module))
.collect(),
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index 2d0295692..6c0fb0baf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -9,7 +9,9 @@
//! at the index that the match starts at and its tree parent is
//! resolved to the search element definition, we get a reference.
-use hir::{PathResolution, Semantics};
+use std::collections::HashMap;
+
+use hir::{DescendPreference, PathResolution, Semantics};
use ide_db::{
base_db::FileId,
defs::{Definition, NameClass, NameRefClass},
@@ -60,19 +62,6 @@ pub(crate) fn find_all_refs(
let syntax = sema.parse(position.file_id).syntax().clone();
let make_searcher = |literal_search: bool| {
move |def: Definition| {
- let declaration = match def {
- Definition::Module(module) => {
- Some(NavigationTarget::from_module_to_decl(sema.db, module))
- }
- def => def.try_to_nav(sema.db),
- }
- .map(|nav| {
- let decl_range = nav.focus_or_full_range();
- Declaration {
- is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range),
- nav,
- }
- });
let mut usages =
def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all();
@@ -80,7 +69,7 @@ pub(crate) fn find_all_refs(
retain_adt_literal_usages(&mut usages, def, sema);
}
- let references = usages
+ let mut references = usages
.into_iter()
.map(|(file_id, refs)| {
(
@@ -91,8 +80,30 @@ pub(crate) fn find_all_refs(
.collect(),
)
})
- .collect();
-
+ .collect::<HashMap<_, Vec<_>, _>>();
+ let declaration = match def {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ }
+ def => def.try_to_nav(sema.db),
+ }
+ .map(|nav| {
+ let (nav, extra_ref) = match nav.def_site {
+ Some(call) => (call, Some(nav.call_site)),
+ None => (nav.call_site, None),
+ };
+ if let Some(extra_ref) = extra_ref {
+ references
+ .entry(extra_ref.file_id)
+ .or_default()
+ .push((extra_ref.focus_or_full_range(), None));
+ }
+ let decl_range = nav.focus_or_full_range();
+ Declaration {
+ is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range),
+ nav,
+ }
+ });
ReferenceSearchResult { declaration, references }
}
};
@@ -109,7 +120,7 @@ pub(crate) fn find_all_refs(
}
None => {
let search = make_searcher(false);
- Some(find_defs(sema, &syntax, position.offset)?.map(search).collect())
+ Some(find_defs(sema, &syntax, position.offset)?.into_iter().map(search).collect())
}
}
}
@@ -118,15 +129,27 @@ pub(crate) fn find_defs<'a>(
sema: &'a Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
offset: TextSize,
-) -> Option<impl Iterator<Item = Definition> + 'a> {
+) -> Option<impl IntoIterator<Item = Definition> + 'a> {
let token = syntax.token_at_offset(offset).find(|t| {
matches!(
t.kind(),
- IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+ IDENT
+ | INT_NUMBER
+ | LIFETIME_IDENT
+ | STRING
+ | T![self]
+ | T![super]
+ | T![crate]
+ | T![Self]
)
- });
- token.map(|token| {
- sema.descend_into_macros_with_same_text(token, offset)
+ })?;
+
+ if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
+ return resolution.map(Definition::from).map(|it| vec![it]);
+ }
+
+ Some(
+ sema.descend_into_macros(DescendPreference::SameText, token)
.into_iter()
.filter_map(|it| ast::NameLike::cast(it.parent()?))
.filter_map(move |name_like| {
@@ -162,7 +185,8 @@ pub(crate) fn find_defs<'a>(
};
Some(def)
})
- })
+ .collect(),
+ )
}
pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool {
@@ -684,6 +708,32 @@ enum Foo {
}
#[test]
+ fn test_self() {
+ check(
+ r#"
+struct S$0<T> {
+ t: PhantomData<T>,
+}
+
+impl<T> S<T> {
+ fn new() -> Self {
+ Self {
+ t: Default::default(),
+ }
+ }
+}
+"#,
+ expect![[r#"
+ S Struct FileId(0) 0..38 7..8
+
+ FileId(0) 48..49
+ FileId(0) 71..75
+ FileId(0) 86..90
+ "#]],
+ )
+ }
+
+ #[test]
fn test_find_all_refs_two_modules() {
check(
r#"
@@ -843,7 +893,7 @@ pub(super) struct Foo$0 {
check_with_scope(
code,
- Some(SearchScope::single_file(FileId(2))),
+ Some(SearchScope::single_file(FileId::from_raw(2))),
expect![[r#"
quux Function FileId(0) 19..35 26..30
@@ -1142,7 +1192,7 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> {
}
"#,
expect![[r#"
- 'a LifetimeParam FileId(0) 55..57 55..57
+ 'a LifetimeParam FileId(0) 55..57
FileId(0) 63..65
FileId(0) 71..73
@@ -1160,7 +1210,7 @@ fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> {
type Foo<'a, T> where T: 'a$0 = &'a T;
"#,
expect![[r#"
- 'a LifetimeParam FileId(0) 9..11 9..11
+ 'a LifetimeParam FileId(0) 9..11
FileId(0) 25..27
FileId(0) 31..33
@@ -1182,7 +1232,7 @@ impl<'a> Foo<'a> for &'a () {
}
"#,
expect![[r#"
- 'a LifetimeParam FileId(0) 47..49 47..49
+ 'a LifetimeParam FileId(0) 47..49
FileId(0) 55..57
FileId(0) 64..66
@@ -2066,4 +2116,27 @@ fn main() { r#fn(); }
"#]],
);
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ r#"
+//- minicore: fmt
+fn test() {
+ let a = "foo";
+ format_args!("hello {a} {a$0} {}", a);
+ // ^
+ // ^
+ // ^
+}
+"#,
+ expect![[r#"
+ a Local FileId(0) 20..21 20..21
+
+ FileId(0) 56..57 Read
+ FileId(0) 60..61 Read
+ FileId(0) 68..69 Read
+ "#]],
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index ac9df5ed6..1febfabfc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -4,16 +4,18 @@
//! tests. This module also implements a couple of magic tricks, like renaming
//! `self` and to `self` (to switch between associated function and method).
-use hir::{AsAssocItem, InFile, Semantics};
+use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
use ide_db::{
- base_db::FileId,
+ base_db::{FileId, FileRange},
defs::{Definition, NameClass, NameRefClass},
rename::{bail, format_err, source_edit_from_references, IdentifierKind},
RootDatabase,
};
use itertools::Itertools;
use stdx::{always, never};
-use syntax::{ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxNode, TextRange, TextSize};
+use syntax::{
+ ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
use text_edit::TextEdit;
@@ -34,23 +36,20 @@ pub(crate) fn prepare_rename(
let syntax = source_file.syntax();
let res = find_definitions(&sema, syntax, position)?
- .map(|(name_like, def)| {
+ .map(|(frange, kind, def)| {
// ensure all ranges are valid
if def.range_for_rename(&sema).is_none() {
bail!("No references found at position")
}
- let Some(frange) = sema.original_range_opt(name_like.syntax()) else {
- bail!("No references found at position");
- };
always!(
frange.range.contains_inclusive(position.offset)
&& frange.file_id == position.file_id
);
- Ok(match name_like {
- ast::NameLike::Lifetime(_) => {
+ Ok(match kind {
+ SyntaxKind::LIFETIME => {
TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end())
}
_ => frange.range,
@@ -93,7 +92,7 @@ pub(crate) fn rename(
let defs = find_definitions(&sema, syntax, position)?;
let ops: RenameResult<Vec<SourceChange>> = defs
- .map(|(_namelike, def)| {
+ .map(|(.., def)| {
if let Definition::Local(local) = def {
if let Some(self_param) = local.as_self_param(sema.db) {
cov_mark::hit!(rename_self_to_param);
@@ -134,11 +133,27 @@ pub(crate) fn will_rename_file(
fn find_definitions(
sema: &Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
- position: FilePosition,
-) -> RenameResult<impl Iterator<Item = (ast::NameLike, Definition)>> {
- let symbols = sema
- .find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, position.offset)
- .map(|name_like| {
+ FilePosition { file_id, offset }: FilePosition,
+) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition)>> {
+ let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
+
+ if let Some((range, Some(resolution))) =
+ token.and_then(|token| sema.check_for_format_args_template(token, offset))
+ {
+ return Ok(vec![(
+ FileRange { file_id, range },
+ SyntaxKind::STRING,
+ Definition::from(resolution),
+ )]
+ .into_iter());
+ }
+
+ let symbols =
+ sema.find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, offset).map(|name_like| {
+ let kind = name_like.syntax().kind();
+ let range = sema
+ .original_range_opt(name_like.syntax())
+ .ok_or_else(|| format_err!("No references found at position"))?;
let res = match &name_like {
// renaming aliases would rename the item being aliased as the HIR doesn't track aliases yet
ast::NameLike::Name(name)
@@ -163,7 +178,6 @@ fn find_definitions(
Definition::Local(local_def)
}
})
- .map(|def| (name_like.clone(), def))
.ok_or_else(|| format_err!("No references found at position")),
ast::NameLike::NameRef(name_ref) => {
NameRefClass::classify(sema, name_ref)
@@ -187,7 +201,7 @@ fn find_definitions(
{
Err(format_err!("Renaming aliases is currently unsupported"))
} else {
- Ok((name_like.clone(), def))
+ Ok(def)
}
})
}
@@ -203,11 +217,10 @@ fn find_definitions(
_ => None,
})
})
- .map(|def| (name_like, def))
.ok_or_else(|| format_err!("No references found at position"))
}
};
- res
+ res.map(|def| (range, kind, def))
});
let res: RenameResult<Vec<_>> = symbols.collect();
@@ -218,7 +231,7 @@ fn find_definitions(
Err(format_err!("No references found at position"))
} else {
// remove duplicates, comparing `Definition`s
- Ok(v.into_iter().unique_by(|t| t.1))
+ Ok(v.into_iter().unique_by(|&(.., def)| def).collect::<Vec<_>>().into_iter())
}
}
Err(e) => Err(e),
@@ -2663,4 +2676,44 @@ struct A;
"error: Cannot rename a non-local definition.",
)
}
+
+ #[test]
+ fn implicit_format_args() {
+ check(
+ "fbar",
+ r#"
+//- minicore: fmt
+fn test() {
+ let foo = "foo";
+ format_args!("hello {foo} {foo$0} {}", foo);
+}
+"#,
+ r#"
+fn test() {
+ let fbar = "foo";
+ format_args!("hello {fbar} {fbar} {}", fbar);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn implicit_format_args2() {
+ check(
+ "fo",
+ r#"
+//- minicore: fmt
+fn test() {
+ let foo = "foo";
+ format_args!("hello {foo} {foo$0} {}", foo);
+}
+"#,
+ r#"
+fn test() {
+ let fo = "foo";
+ format_args!("hello {fo} {fo} {}", fo);
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index 2d528c642..d334e66d3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -2,14 +2,14 @@ use std::fmt;
use ast::HasName;
use cfg::CfgExpr;
-use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics};
+use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics};
use ide_assists::utils::test_related_attribute;
use ide_db::{
base_db::{FilePosition, FileRange},
defs::Definition,
documentation::docs_from_attrs,
helpers::visit_file_defs,
- search::SearchScope,
+ search::{FileReferenceNode, SearchScope},
FxHashMap, FxHashSet, RootDatabase, SymbolKind,
};
use itertools::Itertools;
@@ -142,7 +142,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
Definition::Function(it) => it.source(db).map(|src| src.file_id),
_ => None,
};
- if let Some(file_id) = file_id.filter(|file| file.call_node(db).is_some()) {
+ if let Some(file_id) = file_id.filter(|file| file.macro_file().is_some()) {
in_macro_expansion.entry(file_id).or_default().push(runnable);
return;
}
@@ -240,7 +240,7 @@ fn find_related_tests(
.flatten();
for ref_ in defs {
let name_ref = match ref_.name {
- ast::NameLike::NameRef(name_ref) => name_ref,
+ FileReferenceNode::NameRef(name_ref) => name_ref,
_ => continue,
};
if let Some(fn_def) =
@@ -308,11 +308,7 @@ pub(crate) fn runnable_fn(
sema: &Semantics<'_, RootDatabase>,
def: hir::Function,
) -> Option<Runnable> {
- let name = def.name(sema.db).to_smol_str();
-
- let root = def.module(sema.db).krate().root_module();
-
- let kind = if name == "main" && def.module(sema.db) == root {
+ let kind = if def.is_main(sema.db) {
RunnableKind::Bin
} else {
let test_id = || {
@@ -320,7 +316,9 @@ pub(crate) fn runnable_fn(
let def: hir::ModuleDef = def.into();
def.canonical_path(sema.db)
};
- canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name))
+ canonical_path
+ .map(TestId::Path)
+ .unwrap_or(TestId::Name(def.name(sema.db).to_smol_str()))
};
if def.is_test(sema.db) {
@@ -337,7 +335,8 @@ pub(crate) fn runnable_fn(
sema.db,
def.source(sema.db)?.as_ref().map(|it| it as &dyn ast::HasName),
SymbolKind::Function,
- );
+ )
+ .call_site();
let cfg = def.attrs(sema.db).cfg();
Some(Runnable { use_name_in_title: false, nav, kind, cfg })
}
@@ -359,7 +358,7 @@ pub(crate) fn runnable_mod(
let attrs = def.attrs(sema.db);
let cfg = attrs.cfg();
- let nav = NavigationTarget::from_module_to_decl(sema.db, def);
+ let nav = NavigationTarget::from_module_to_decl(sema.db, def).call_site();
Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::TestMod { path }, cfg })
}
@@ -372,7 +371,7 @@ pub(crate) fn runnable_impl(
return None;
}
let cfg = attrs.cfg();
- let nav = def.try_to_nav(sema.db)?;
+ let nav = def.try_to_nav(sema.db)?.call_site();
let ty = def.self_ty(sema.db);
let adt_name = ty.as_adt()?.name(sema.db);
let mut ty_args = ty.generic_parameters(sema.db).peekable();
@@ -409,7 +408,7 @@ fn runnable_mod_outline_definition(
match def.definition_source(sema.db).value {
hir::ModuleSource::SourceFile(_) => Some(Runnable {
use_name_in_title: false,
- nav: def.to_nav(sema.db),
+ nav: def.to_nav(sema.db).call_site(),
kind: RunnableKind::TestMod { path },
cfg,
}),
@@ -467,7 +466,8 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
let mut nav = match def {
Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def),
def => def.try_to_nav(db)?,
- };
+ }
+ .call_site();
nav.focus_range = None;
nav.description = None;
nav.docs = None;
@@ -587,6 +587,9 @@ mod tests {
$0
fn main() {}
+#[export_name = "main"]
+fn __cortex_m_rt_main_trampoline() {}
+
#[test]
fn test_foo() {}
@@ -604,7 +607,7 @@ mod not_a_root {
fn main() {}
}
"#,
- &[TestMod, Bin, Test, Test, Test, Bench],
+ &[TestMod, Bin, Bin, Test, Test, Test, Bench],
expect![[r#"
[
Runnable {
@@ -613,7 +616,7 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 0..190,
+ full_range: 0..253,
name: "",
kind: Module,
},
@@ -642,8 +645,22 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 15..39,
- focus_range: 26..34,
+ full_range: 15..76,
+ focus_range: 42..71,
+ name: "__cortex_m_rt_main_trampoline",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 78..102,
+ focus_range: 89..97,
name: "test_foo",
kind: Function,
},
@@ -663,8 +680,8 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 41..92,
- focus_range: 73..87,
+ full_range: 104..155,
+ focus_range: 136..150,
name: "test_full_path",
kind: Function,
},
@@ -684,8 +701,8 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 94..128,
- focus_range: 115..123,
+ full_range: 157..191,
+ focus_range: 178..186,
name: "test_foo",
kind: Function,
},
@@ -705,8 +722,8 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 130..152,
- focus_range: 142..147,
+ full_range: 193..215,
+ focus_range: 205..210,
name: "bench",
kind: Function,
},
@@ -1655,12 +1672,18 @@ macro_rules! gen2 {
}
}
}
+macro_rules! gen_main {
+ () => {
+ fn main() {}
+ }
+}
mod tests {
gen!();
}
gen2!();
+gen_main!();
"#,
- &[TestMod, TestMod, Test, Test, TestMod],
+ &[TestMod, TestMod, Test, Test, TestMod, Bin],
expect![[r#"
[
Runnable {
@@ -1669,7 +1692,7 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 0..237,
+ full_range: 0..315,
name: "",
kind: Module,
},
@@ -1684,8 +1707,8 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 202..227,
- focus_range: 206..211,
+ full_range: 267..292,
+ focus_range: 271..276,
name: "tests",
kind: Module,
description: "mod tests",
@@ -1701,7 +1724,7 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 218..225,
+ full_range: 283..290,
name: "foo_test",
kind: Function,
},
@@ -1721,7 +1744,7 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 228..236,
+ full_range: 293..301,
name: "foo_test2",
kind: Function,
},
@@ -1741,7 +1764,7 @@ gen2!();
file_id: FileId(
0,
),
- full_range: 228..236,
+ full_range: 293..301,
name: "tests2",
kind: Module,
description: "mod tests2",
@@ -1751,6 +1774,19 @@ gen2!();
},
cfg: None,
},
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 302..314,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
]
"#]],
);
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
index e020b52e1..990376a49 100644
--- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -4,7 +4,10 @@
use std::collections::BTreeSet;
use either::Either;
-use hir::{AssocItem, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait};
+use hir::{
+ AssocItem, DescendPreference, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics,
+ Trait,
+};
use ide_db::{
active_parameter::{callable_for_node, generic_def_for_node},
base_db::FilePosition,
@@ -79,7 +82,7 @@ pub(crate) fn signature_help(
// if the cursor is sandwiched between two space tokens and the call is unclosed
// this prevents us from leaving the CallExpression
.and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
- let token = sema.descend_into_macros_single(token, offset);
+ let token = sema.descend_into_macros_single(DescendPreference::None, token);
for node in token.parent_ancestors() {
match_ast! {
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index aabd26da2..3724dc282 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -3,7 +3,7 @@
use std::collections::HashMap;
-use hir::{db::HirDatabase, Crate, Module};
+use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
use ide_db::helpers::get_definition;
use ide_db::{
base_db::{FileId, FileRange, SourceDatabaseExt},
@@ -13,6 +13,7 @@ use ide_db::{
use syntax::{AstNode, SyntaxKind::*, TextRange, T};
use crate::inlay_hints::InlayFieldsToResolve;
+use crate::navigation_target::UpmappingResult;
use crate::{
hover::hover_for_definition,
inlay_hints::AdjustmentHintsMode,
@@ -118,6 +119,7 @@ impl StaticIndex<'_> {
adjustment_hints: crate::AdjustmentHints::Never,
adjustment_hints_mode: AdjustmentHintsMode::Prefix,
adjustment_hints_hide_outside_unsafe: false,
+ implicit_drop_hints: false,
hide_named_constructor_hints: false,
hide_closure_initialization_hints: false,
closure_style: hir::ClosureStyle::ImplFn,
@@ -165,9 +167,8 @@ impl StaticIndex<'_> {
} else {
let it = self.tokens.insert(TokenStaticData {
hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
- definition: def.try_to_nav(self.db).map(|it| FileRange {
- file_id: it.file_id,
- range: it.focus_or_full_range(),
+ definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| {
+ FileRange { file_id: it.file_id, range: it.focus_or_full_range() }
}),
references: vec![],
moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
@@ -178,7 +179,7 @@ impl StaticIndex<'_> {
let token = self.tokens.get_mut(id).unwrap();
token.references.push(ReferenceData {
range: FileRange { range, file_id },
- is_definition: match def.try_to_nav(self.db) {
+ is_definition: match def.try_to_nav(self.db).map(UpmappingResult::call_site) {
Some(it) => it.file_id == file_id && it.focus_or_full_range() == range,
None => false,
},
@@ -242,6 +243,7 @@ mod tests {
}
}
+ #[track_caller]
fn check_definitions(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&analysis);
diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs
index c9ee460a1..e7f97ebe6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/status.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/status.rs
@@ -2,7 +2,7 @@ use std::{fmt, marker::PhantomData};
use hir::{
db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery},
- Attr, Attrs, ExpandResult, MacroFile, Module,
+ Attr, Attrs, ExpandResult, MacroFileId, Module,
};
use ide_db::{
base_db::{
@@ -199,8 +199,12 @@ impl StatCollect<FileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> {
}
}
-impl<M> StatCollect<MacroFile, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
- fn collect_entry(&mut self, _: MacroFile, value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>) {
+impl<M> StatCollect<MacroFileId, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
+ fn collect_entry(
+ &mut self,
+ _: MacroFileId,
+ value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>,
+ ) {
self.total += 1;
self.retained += value.is_some() as usize;
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index bb01c81d6..307812156 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -13,7 +13,7 @@ mod html;
#[cfg(test)]
mod tests;
-use hir::{Name, Semantics};
+use hir::{DescendPreference, Name, Semantics};
use ide_db::{FxHashMap, RootDatabase, SymbolKind};
use syntax::{
ast::{self, IsString},
@@ -245,7 +245,7 @@ fn traverse(
let mut macro_highlighter = MacroHighlighter::default();
// FIXME: these are not perfectly accurate, we determine them by the real file's syntax tree
- // an an attribute nested in a macro call will not emit `inside_attribute`
+ // an attribute nested in a macro call will not emit `inside_attribute`
let mut inside_attribute = false;
let mut inside_macro_call = false;
@@ -393,13 +393,18 @@ fn traverse(
// Attempt to descend tokens into macro-calls.
let res = match element {
NodeOrToken::Token(token) if token.kind() != COMMENT => {
- let token = match attr_or_derive_item {
- Some(AttrOrDerive::Attr(_)) => {
- sema.descend_into_macros_with_kind_preference(token, 0.into())
- }
- Some(AttrOrDerive::Derive(_)) | None => {
- sema.descend_into_macros_single(token, 0.into())
- }
+ let token = if token.kind() == STRING {
+ // for strings, try to prefer a string that has not been lost in a token
+ // tree
+ // FIXME: This should be done for everything, but check perf first
+ sema.descend_into_macros(DescendPreference::SameKind, token)
+ .into_iter()
+ .max_by_key(|it| {
+ it.parent().map_or(false, |it| it.kind() != TOKEN_TREE)
+ })
+ .unwrap()
+ } else {
+ sema.descend_into_macros_single(DescendPreference::SameKind, token)
};
match token.parent().and_then(ast::NameLike::cast) {
// Remap the token into the wrapping single token nodes
@@ -441,7 +446,7 @@ fn traverse(
{
continue;
}
- highlight_format_string(hl, &string, &expanded_string, range);
+ highlight_format_string(hl, sema, krate, &string, &expanded_string, range);
if !string.is_raw() {
highlight_escape_string(hl, &string, range.start());
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
index 2ef131594..518e71454 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/format.rs
@@ -1,14 +1,20 @@
//! Syntax highlighting for format macro strings.
use ide_db::{
+ defs::Definition,
syntax_helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
SymbolKind,
};
use syntax::{ast, TextRange};
-use crate::{syntax_highlighting::highlights::Highlights, HlRange, HlTag};
+use crate::{
+ syntax_highlighting::{highlight::highlight_def, highlights::Highlights},
+ HlRange, HlTag,
+};
pub(super) fn highlight_format_string(
stack: &mut Highlights,
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+ krate: hir::Crate,
string: &ast::String,
expanded_string: &ast::String,
range: TextRange,
@@ -27,6 +33,18 @@ pub(super) fn highlight_format_string(
});
}
});
+
+ if let Some(parts) = sema.as_format_args_parts(string) {
+ parts.into_iter().for_each(|(range, res)| {
+ if let Some(res) = res {
+ stack.add(HlRange {
+ range,
+ highlight: highlight_def(sema, krate, Definition::from(res)),
+ binding_hash: None,
+ })
+ }
+ })
+ }
}
fn highlight_format_specifier(kind: FormatSpecifier) -> Option<HlTag> {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 7d00282fc..0558f658f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -1,6 +1,6 @@
//! Computes color for a single element.
-use hir::{AsAssocItem, HasVisibility, Semantics};
+use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics};
use ide_db::{
defs::{Definition, IdentClass, NameClass, NameRefClass},
FxHashMap, RootDatabase, SymbolKind,
@@ -218,7 +218,10 @@ fn highlight_name_ref(
// We can fix this for derive attributes since derive helpers are recorded, but not for
// general attributes.
None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR)
- && !sema.hir_file_for(name_ref.syntax()).is_derive_attr_pseudo_expansion(sema.db) =>
+ && !sema
+ .hir_file_for(name_ref.syntax())
+ .macro_file()
+ .map_or(false, |it| it.is_derive_attr_pseudo_expansion(sema.db)) =>
{
return HlTag::Symbol(SymbolKind::Attribute).into();
}
@@ -348,7 +351,7 @@ fn calc_binding_hash(name: &hir::Name, shadow_count: u32) -> u64 {
hash((name, shadow_count))
}
-fn highlight_def(
+pub(super) fn highlight_def(
sema: &Semantics<'_, RootDatabase>,
krate: hir::Crate,
def: Definition,
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index 06b66b302..e8b3a38c9 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -43,7 +43,9 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="macro library">mirror</span><span class="macro_bang">!</span> <span class="brace macro">{</span>
+<pre><code><span class="keyword">use</span> <span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="brace">{</span><span class="function library">mirror</span><span class="comma">,</span> <span class="function library">identity</span><span class="comma">,</span> <span class="derive library">DeriveIdentity</span><span class="brace">}</span><span class="semicolon">;</span>
+
+<span class="macro library">mirror</span><span class="macro_bang">!</span> <span class="brace macro">{</span>
<span class="brace macro">{</span>
<span class="comma macro">,</span><span class="builtin_type macro">i32</span> <span class="colon macro">:</span><span class="field declaration macro public">x</span> <span class="keyword macro">pub</span>
<span class="comma macro">,</span><span class="builtin_type macro">i32</span> <span class="colon macro">:</span><span class="field declaration macro public">y</span> <span class="keyword macro">pub</span>
@@ -90,17 +92,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">concat</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">include</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="macro">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+<span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
- <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">noop</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 64e614cec..84a823363 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -48,47 +48,38 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>io<span class="colon">:</span><span class="colon">:</span>_print<span class="parenthesis">(</span>format_args_nl<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span><span class="parenthesis">)</span>
<span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args_nl</span> <span class="brace">{</span><span class="brace">}</span>
<span class="keyword">mod</span> <span class="module declaration">panic</span> <span class="brace">{</span>
<span class="keyword">pub</span> <span class="keyword">macro</span> <span class="macro declaration">panic_2015</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="string_literal">"explicit panic"</span><span class="parenthesis">)</span>
+ panic<span class="parenthesis">(</span><span class="string_literal">"explicit panic"</span><span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>literal <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
+ panic<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="comment">// Use `panic_str` instead of `panic_display::&lt;&str&gt;` for non_fmt_panic lint.</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_str<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
+ panic_str<span class="parenthesis">(</span><span class="punctuation">$</span>msg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="comment">// Special-case the single-argument case for const_panic.</span>
<span class="parenthesis">(</span><span class="string_literal">"{}"</span><span class="comma">,</span> <span class="punctuation">$</span>arg<span class="colon">:</span>expr <span class="punctuation">$</span><span class="parenthesis">(</span><span class="comma">,</span><span class="parenthesis">)</span><span class="punctuation">?</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_display<span class="parenthesis">(</span><span class="punctuation">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
+ panic_display<span class="parenthesis">(</span><span class="punctuation">&</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="colon">:</span>expr<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span>
- <span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panicking<span class="colon">:</span><span class="colon">:</span>panic_fmt<span class="parenthesis">(</span>const_format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
+ panic_fmt<span class="parenthesis">(</span>const_format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>fmt<span class="comma">,</span> <span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span>
<span class="parenthesis">)</span><span class="comma">,</span>
<span class="brace">}</span>
<span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="parenthesis attribute">(</span><span class="none attribute">std_panic</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">macro_export</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">panic</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">assert</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">asm</span> <span class="brace">{</span><span class="brace">}</span>
-<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
-<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">concat</span> <span class="brace">{</span><span class="brace">}</span>
-
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">toho</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented"</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="punctuation">$</span>crate<span class="colon">:</span><span class="colon">:</span>panic<span class="punctuation">!</span><span class="parenthesis">(</span><span class="string_literal">"not yet implemented: {}"</span><span class="comma">,</span> format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="parenthesis">)</span><span class="punctuation">+</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">reuse_twice</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="colon">:</span>literal<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span><span class="brace">{</span>stringify<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="parenthesis">)</span><span class="semicolon">;</span> format_args<span class="punctuation">!</span><span class="parenthesis">(</span><span class="punctuation">$</span>literal<span class="parenthesis">)</span><span class="brace">}</span><span class="brace">}</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\n</span><span class="char_literal">'</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\t</span><span class="char_literal">'</span><span class="semicolon">;</span>
@@ -165,20 +156,23 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable declaration macro">ничоси</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="variable">x</span><span class="format_specifier">?</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> "</span><span class="comma macro">,</span> <span class="unresolved_reference macro">thingy</span><span class="comma macro">,</span> <span class="unresolved_reference macro">n2</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">panic</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">panic</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"more {}"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"{} asdasd"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">panic</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">panic</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"more </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> asdasd"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">i</span><span class="colon">:</span> <span class="builtin_type">u64</span> <span class="operator">=</span> <span class="numeric_literal">3</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">o</span><span class="colon">:</span> <span class="builtin_type">u64</span><span class="semicolon">;</span>
- <span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
- <span class="string_literal macro">"mov {0}, {1}"</span><span class="comma macro">,</span>
- <span class="string_literal macro">"add {0}, 5"</span><span class="comma macro">,</span>
+ <span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
+ <span class="string_literal macro">"mov </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">, </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span>
+ <span class="string_literal macro">"add </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">, 5"</span><span class="comma macro">,</span>
<span class="none macro">out</span><span class="parenthesis macro">(</span><span class="none macro">reg</span><span class="parenthesis macro">)</span> <span class="none macro">o</span><span class="comma macro">,</span>
<span class="keyword control macro">in</span><span class="parenthesis macro">(</span><span class="none macro">reg</span><span class="parenthesis macro">)</span> <span class="none macro">i</span><span class="comma macro">,</span>
<span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="comma macro">,</span> <span class="macro default_library library macro">format_args</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="unresolved_reference macro">foo</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="macro macro">toho</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="keyword">const</span> <span class="constant declaration">CONSTANT</span><span class="colon">:</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="colon">:</span>
+ <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">m</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro default_library library macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="variable reference">backslash</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="constant">CONSTANT</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="variable mutable">m</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="comma macro">,</span> <span class="macro default_library library macro">format_args</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span 
class="unresolved_reference macro">foo</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="macro macro">toho</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">reuse_twice</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable reference">backslash</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
index 542d89925..afb6c555b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -47,9 +47,12 @@ struct Foo;
fn macros() {
check_highlighting(
r#"
-//- proc_macros: mirror
+//- proc_macros: mirror, identity, derive_identity
+//- minicore: fmt, include, concat
//- /lib.rs crate:lib
-proc_macros::mirror! {
+use proc_macros::{mirror, identity, DeriveIdentity};
+
+mirror! {
{
,i32 :x pub
,i32 :y pub
@@ -96,12 +99,6 @@ macro without_args {
}
}
-#[rustc_builtin_macro]
-macro_rules! concat {}
-#[rustc_builtin_macro]
-macro_rules! include {}
-#[rustc_builtin_macro]
-macro_rules! format_args {}
include!(concat!("foo/", "foo.rs"));
@@ -401,53 +398,44 @@ fn test_string_highlighting() {
// thus, we have to copy the macro definition from `std`
check_highlighting(
r#"
-//- minicore: fmt
+//- minicore: fmt, assert, asm, concat, panic
macro_rules! println {
($($arg:tt)*) => ({
$crate::io::_print(format_args_nl!($($arg)*));
})
}
-#[rustc_builtin_macro]
-#[macro_export]
-macro_rules! format_args_nl {}
mod panic {
pub macro panic_2015 {
() => (
- $crate::panicking::panic("explicit panic")
+ panic("explicit panic")
),
($msg:literal $(,)?) => (
- $crate::panicking::panic($msg)
+ panic($msg)
),
// Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
($msg:expr $(,)?) => (
- $crate::panicking::panic_str($msg)
+ panic_str($msg)
),
// Special-case the single-argument case for const_panic.
("{}", $arg:expr $(,)?) => (
- $crate::panicking::panic_display(&$arg)
+ panic_display(&$arg)
),
($fmt:expr, $($arg:tt)+) => (
- $crate::panicking::panic_fmt(const_format_args!($fmt, $($arg)+))
+ panic_fmt(const_format_args!($fmt, $($arg)+))
),
}
}
-#[rustc_builtin_macro(std_panic)]
-#[macro_export]
-macro_rules! panic {}
-#[rustc_builtin_macro]
-macro_rules! assert {}
-#[rustc_builtin_macro]
-macro_rules! asm {}
-#[rustc_builtin_macro]
-macro_rules! concat {}
-
macro_rules! toho {
() => ($crate::panic!("not yet implemented"));
($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", format_args!($($arg)+)));
}
+macro_rules! reuse_twice {
+ ($literal:literal) => {{stringify!($literal); format_args!($literal)}};
+}
+
fn main() {
let a = '\n';
let a = '\t';
@@ -538,8 +526,11 @@ fn main() {
in(reg) i,
);
+ const CONSTANT: () = ():
+ let mut m = ();
format_args!(concat!("{}"), "{}");
- format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
+ format_args!("{} {} {} {} {} {} {backslash} {CONSTANT} {m}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
+ reuse_twice!("{backslash}");
}"#,
expect_file!["./test_data/highlight_strings.html"],
false,
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
index b40509715..d21850bcf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -47,7 +47,7 @@ struct ExtendedTextEdit {
// - typing `=` between two expressions adds `;` when in statement position
// - typing `=` to turn an assignment into an equality comparison removes `;` when in expression position
// - typing `.` in a chain method call auto-indents
-// - typing `{` in front of an expression inserts a closing `}` after the expression
+// - typing `{` or `(` in front of an expression inserts a closing `}` or `)` after the expression
// - typing `{` in a use item adds a closing `}` in the right place
//
// VS Code::
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
index d2bbbf6d2..9abe54cd3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_hir.rs
@@ -1,7 +1,7 @@
use hir::{DefWithBody, Semantics};
use ide_db::base_db::FilePosition;
use ide_db::RootDatabase;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode};
// Feature: View Hir
//
@@ -19,7 +19,9 @@ fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
- let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+ let item = ancestors_at_offset(source_file.syntax(), position.offset)
+ .filter(|it| !ast::MacroCall::can_cast(it.kind()))
+ .find_map(ast::Item::cast)?;
let def: DefWithBody = match item {
ast::Item::Fn(it) => sema.to_def(&it)?.into(),
ast::Item::Const(it) => sema.to_def(&it)?.into(),
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
index 2f6332abd..3802978f4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
@@ -55,6 +55,7 @@ impl fmt::Display for RecursiveMemoryLayout {
}
}
+#[derive(Copy, Clone)]
enum FieldOrTupleIdx {
Field(Field),
TupleIdx(usize),
@@ -71,13 +72,6 @@ impl FieldOrTupleIdx {
FieldOrTupleIdx::TupleIdx(i) => format!(".{i}").to_owned(),
}
}
-
- fn index(&self) -> usize {
- match *self {
- FieldOrTupleIdx::Field(f) => f.index(),
- FieldOrTupleIdx::TupleIdx(i) => i,
- }
- }
}
// Feature: View Memory Layout
@@ -138,7 +132,10 @@ pub(crate) fn view_memory_layout(
return;
}
- fields.sort_by_key(|(f, _)| layout.field_offset(f.index()).unwrap());
+ fields.sort_by_key(|&(f, _)| match f {
+ FieldOrTupleIdx::Field(f) => layout.field_offset(f).unwrap_or(0),
+ FieldOrTupleIdx::TupleIdx(f) => layout.tuple_field_offset(f).unwrap_or(0),
+ });
let children_start = nodes.len();
nodes[parent_idx].children_start = children_start as i64;
@@ -151,7 +148,10 @@ pub(crate) fn view_memory_layout(
typename: child_ty.display(db).to_string(),
size: child_layout.size(),
alignment: child_layout.align(),
- offset: layout.field_offset(field.index()).unwrap_or(0),
+ offset: match *field {
+ FieldOrTupleIdx::Field(f) => layout.field_offset(f).unwrap_or(0),
+ FieldOrTupleIdx::TupleIdx(f) => layout.tuple_field_offset(f).unwrap_or(0),
+ },
parent_idx: parent_idx as i64,
children_start: -1,
children_len: 0,
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
index a36aba58b..08d810c13 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_mir.rs
@@ -1,7 +1,7 @@
use hir::{DefWithBody, Semantics};
use ide_db::base_db::FilePosition;
use ide_db::RootDatabase;
-use syntax::{algo::find_node_at_offset, ast, AstNode};
+use syntax::{algo::ancestors_at_offset, ast, AstNode};
// Feature: View Mir
//
@@ -18,7 +18,9 @@ fn body_mir(db: &RootDatabase, position: FilePosition) -> Option<String> {
let sema = Semantics::new(db);
let source_file = sema.parse(position.file_id);
- let item = find_node_at_offset::<ast::Item>(source_file.syntax(), position.offset)?;
+ let item = ancestors_at_offset(source_file.syntax(), position.offset)
+ .filter(|it| !ast::MacroCall::can_cast(it.kind()))
+ .find_map(ast::Item::cast)?;
let def: DefWithBody = match item {
ast::Item::Fn(it) => sema.to_def(&it)?.into(),
ast::Item::Const(it) => sema.to_def(&it)?.into(),
diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml
index 89b302c79..d9184b0fb 100644
--- a/src/tools/rust-analyzer/crates/intern/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
[dependencies]
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.4.0", features = ["raw-api"] }
+dashmap.workspace = true
hashbrown.workspace = true
rustc-hash = "1.1.0"
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/intern/src/lib.rs b/src/tools/rust-analyzer/crates/intern/src/lib.rs
index 2934d2667..d784321c7 100644
--- a/src/tools/rust-analyzer/crates/intern/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/intern/src/lib.rs
@@ -33,13 +33,10 @@ impl<T: Internable> Interned<T> {
// - if not, box it up, insert it, and return a clone
// This needs to be atomic (locking the shard) to avoid races with other thread, which could
// insert the same object between us looking it up and inserting it.
- match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, &obj) {
+ match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
RawEntryMut::Vacant(vac) => Self {
- arc: vac
- .insert_hashed_nocheck(hash as u64, Arc::new(obj), SharedValue::new(()))
- .0
- .clone(),
+ arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(),
},
}
}
@@ -54,13 +51,10 @@ impl Interned<str> {
// - if not, box it up, insert it, and return a clone
// This needs to be atomic (locking the shard) to avoid races with other thread, which could
// insert the same object between us looking it up and inserting it.
- match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, s) {
+ match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
RawEntryMut::Vacant(vac) => Self {
- arc: vac
- .insert_hashed_nocheck(hash as u64, Arc::from(s), SharedValue::new(()))
- .0
- .clone(),
+ arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(),
},
}
}
diff --git a/src/tools/rust-analyzer/crates/limit/src/lib.rs b/src/tools/rust-analyzer/crates/limit/src/lib.rs
index 7fb4b513a..7f4b00df0 100644
--- a/src/tools/rust-analyzer/crates/limit/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/limit/src/lib.rs
@@ -1,6 +1,6 @@
//! limit defines a struct to enforce limits.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#[cfg(feature = "tracking")]
use std::sync::atomic::AtomicUsize;
diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
index f041ca88a..31b9f6c76 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
@@ -11,13 +11,13 @@ authors.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
-anyhow = "1.0.62"
+anyhow.workspace = true
crossbeam-channel = "0.5.5"
-itertools = "0.10.5"
-tracing = "0.1.35"
+itertools.workspace = true
+tracing.workspace = true
ide.workspace = true
-ide-db.workspace =true
+ide-db.workspace = true
proc-macro-api.workspace = true
project-model.workspace = true
tt.workspace = true
diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
index 7a795dd62..db9654220 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -4,19 +4,19 @@
// to run rust-analyzer as a library.
use std::{collections::hash_map::Entry, mem, path::Path, sync};
-use ::tt::token_id as tt;
use crossbeam_channel::{unbounded, Receiver};
use ide::{AnalysisHost, Change, SourceRoot};
use ide_db::{
base_db::{
- CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
- ProcMacroLoadResult, ProcMacros,
+ span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
+ ProcMacroKind, ProcMacroLoadResult, ProcMacros,
},
FxHashMap,
};
use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
+use tt::DelimSpan;
use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath};
pub struct LoadCargoConfig {
@@ -208,6 +208,7 @@ impl ProjectFolders {
let entry = {
let mut dirs = vfs::loader::Directories::default();
dirs.extensions.push("rs".into());
+ dirs.extensions.push("toml".into());
dirs.include.extend(root.include);
dirs.exclude.extend(root.exclude);
for excl in global_excludes {
@@ -373,12 +374,15 @@ struct Expander(proc_macro_api::ProcMacro);
impl ProcMacroExpander for Expander {
fn expand(
&self,
- subtree: &tt::Subtree,
- attrs: Option<&tt::Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
+ ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
- match self.0.expand(subtree, attrs, env) {
+ match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) {
Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
@@ -393,10 +397,13 @@ struct IdentityExpander;
impl ProcMacroExpander for IdentityExpander {
fn expand(
&self,
- subtree: &tt::Subtree,
- _: Option<&tt::Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ _: Option<&tt::Subtree<SpanData>>,
_: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ _: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@@ -408,11 +415,14 @@ struct EmptyExpander;
impl ProcMacroExpander for EmptyExpander {
fn expand(
&self,
- _: &tt::Subtree,
- _: Option<&tt::Subtree>,
+ _: &tt::Subtree<SpanData>,
+ _: Option<&tt::Subtree<SpanData>>,
_: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
- Ok(tt::Subtree::empty())
+ call_site: SpanData,
+ _: SpanData,
+ _: SpanData,
+ ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
+ Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }))
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
index 82105522e..adab1003d 100644
--- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
@@ -15,7 +15,7 @@ doctest = false
cov-mark = "2.0.0-pre.1"
rustc-hash = "1.1.0"
smallvec.workspace = true
-tracing = "0.1.35"
+tracing.workspace = true
# local deps
syntax.workspace = true
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index 9d43e1304..f503aecce 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests};
use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator},
- syntax_node_to_token_tree, tt, DeclarativeMacro,
+ syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY,
};
#[test]
@@ -38,7 +38,7 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
- let res = rules[&id].expand(tt);
+ let res = rules[&id].expand(&tt, |_| ());
assert!(res.err.is_none());
res.value.token_trees.len()
})
@@ -47,14 +47,14 @@ fn benchmark_expand_macro_rules() {
assert_eq!(hash, 69413);
}
-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
macro_rules_fixtures_tt()
.into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect()
}
-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@@ -64,14 +64,17 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
.filter_map(ast::MacroRules::cast)
.map(|rule| {
let id = rule.name().unwrap().to_string();
- let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
+ let def_tt =
+ syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
(id, def_tt)
})
.collect()
}
/// Generate random invocation fixtures from rules
-fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
+fn invocation_fixtures(
+ rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
+) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
let mut seed = 123456789;
let mut res = Vec::new();
@@ -93,8 +96,8 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
loop {
let mut subtree = tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
+ open: DUMMY,
+ close: DUMMY,
kind: tt::DelimiterKind::Invisible,
},
token_trees: vec![],
@@ -102,7 +105,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
for op in rule.lhs.iter() {
collect_from_op(op, &mut subtree, &mut seed);
}
- if it.expand(subtree.clone()).err.is_none() {
+ if it.expand(&subtree, |_| ()).err.is_none() {
res.push((name.clone(), subtree));
break;
}
@@ -116,7 +119,11 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
}
return res;
- fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
+ fn collect_from_op(
+ op: &Op<DummyTestSpanData>,
+ parent: &mut tt::Subtree<DummyTestSpanData>,
+ seed: &mut usize,
+ ) {
return match op {
Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@@ -202,38 +209,21 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
- fn make_ident(ident: &str) -> tt::TokenTree {
- tt::Leaf::Ident(tt::Ident {
- span: tt::TokenId::unspecified(),
- text: SmolStr::new(ident),
- })
- .into()
+ fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into()
}
- fn make_punct(char: char) -> tt::TokenTree {
- tt::Leaf::Punct(tt::Punct {
- span: tt::TokenId::unspecified(),
- char,
- spacing: tt::Spacing::Alone,
- })
- .into()
+ fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
}
- fn make_literal(lit: &str) -> tt::TokenTree {
- tt::Leaf::Literal(tt::Literal {
- span: tt::TokenId::unspecified(),
- text: SmolStr::new(lit),
- })
- .into()
+ fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
+ tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into()
}
fn make_subtree(
kind: tt::DelimiterKind,
- token_trees: Option<Vec<tt::TokenTree>>,
- ) -> tt::TokenTree {
+ token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
+ ) -> tt::TokenTree<DummyTestSpanData> {
tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
- kind,
- },
+ delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
token_trees: token_trees.unwrap_or_default(),
}
.into()
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index f2d89d3ef..0e755f69b 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -7,15 +7,17 @@ mod transcriber;
use rustc_hash::FxHashMap;
use syntax::SmolStr;
+use tt::Span;
-use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult};
+use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
-pub(crate) fn expand_rules(
- rules: &[crate::Rule],
- input: &tt::Subtree,
+pub(crate) fn expand_rules<S: Span>(
+ rules: &[crate::Rule<S>],
+ input: &tt::Subtree<S>,
+ marker: impl Fn(&mut S) + Copy,
is_2021: bool,
-) -> ExpandResult<tt::Subtree> {
- let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
+) -> ExpandResult<tt::Subtree<S>> {
+ let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
for rule in rules {
let new_match = matcher::match_(&rule.lhs, input, is_2021);
@@ -24,7 +26,7 @@ pub(crate) fn expand_rules(
// Unconditionally returning the transcription here makes the
// `test_repeat_bad_var` test fail.
let ExpandResult { value, err: transcribe_err } =
- transcriber::transcribe(&rule.rhs, &new_match.bindings);
+ transcriber::transcribe(&rule.rhs, &new_match.bindings, marker);
if transcribe_err.is_none() {
return ExpandResult::ok(value);
}
@@ -43,11 +45,11 @@ pub(crate) fn expand_rules(
if let Some((match_, rule)) = match_ {
// if we got here, there was no match without errors
let ExpandResult { value, err: transcribe_err } =
- transcriber::transcribe(&rule.rhs, &match_.bindings);
+ transcriber::transcribe(&rule.rhs, &match_.bindings, marker);
ExpandResult { value, err: match_.err.or(transcribe_err) }
} else {
ExpandResult::new(
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
+ tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] },
ExpandError::NoMatchingRule,
)
}
@@ -98,31 +100,37 @@ pub(crate) fn expand_rules(
/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
/// `tt::TokenTree`, where the index to select a particular `TokenTree` among
/// many is not a plain `usize`, but a `&[usize]`.
-#[derive(Debug, Default, Clone, PartialEq, Eq)]
-struct Bindings {
- inner: FxHashMap<SmolStr, Binding>,
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct Bindings<S> {
+ inner: FxHashMap<SmolStr, Binding<S>>,
+}
+
+impl<S> Default for Bindings<S> {
+ fn default() -> Self {
+ Self { inner: Default::default() }
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
-enum Binding {
- Fragment(Fragment),
- Nested(Vec<Binding>),
+enum Binding<S> {
+ Fragment(Fragment<S>),
+ Nested(Vec<Binding<S>>),
Empty,
Missing(MetaVarKind),
}
#[derive(Debug, Clone, PartialEq, Eq)]
-enum Fragment {
+enum Fragment<S> {
/// token fragments are just copy-pasted into the output
- Tokens(tt::TokenTree),
+ Tokens(tt::TokenTree<S>),
/// Expr ast fragments are surrounded with `()` on insertion to preserve
/// precedence. Note that this impl is different from the one currently in
/// `rustc` -- `rustc` doesn't translate fragments into token trees at all.
///
- /// At one point in time, we tried to to use "fake" delimiters here a-la
+ /// At one point in time, we tried to use "fake" delimiters here à la
/// proc-macro delimiter=none. As we later discovered, "none" delimiters are
/// tricky to handle in the parser, and rustc doesn't handle those either.
- Expr(tt::TokenTree),
+ Expr(tt::Subtree<S>),
/// There are roughly two types of paths: paths in expression context, where a
/// separator `::` between an identifier and its following generic argument list
/// is mandatory, and paths in type context, where `::` can be omitted.
@@ -132,5 +140,5 @@ enum Fragment {
/// and is trasncribed as an expression-context path, verbatim transcription
/// would cause a syntax error. We need to fix it up just before transcribing;
/// see `transcriber::fix_up_and_push_path_tt()`.
- Path(tt::TokenTree),
+ Path(tt::Subtree<S>),
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index 1471af98b..012b02a3f 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -63,21 +63,21 @@ use std::rc::Rc;
use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
+use tt::Span;
use crate::{
expander::{Binding, Bindings, ExpandResult, Fragment},
parser::{MetaVarKind, Op, RepeatKind, Separator},
- tt,
tt_iter::TtIter,
ExpandError, MetaTemplate, ValueResult,
};
-impl Bindings {
+impl<S: Span> Bindings<S> {
fn push_optional(&mut self, name: &SmolStr) {
// FIXME: Do we have a better way to represent an empty token ?
// Insert an empty subtree for empty token
let tt =
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into();
+ tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into();
self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
}
@@ -85,14 +85,14 @@ impl Bindings {
self.inner.insert(name.clone(), Binding::Empty);
}
- fn bindings(&self) -> impl Iterator<Item = &Binding> {
+ fn bindings(&self) -> impl Iterator<Item = &Binding<S>> {
self.inner.values()
}
}
-#[derive(Clone, Debug, Default, PartialEq, Eq)]
-pub(super) struct Match {
- pub(super) bindings: Bindings,
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(super) struct Match<S> {
+ pub(super) bindings: Bindings<S>,
/// We currently just keep the first error and count the rest to compare matches.
pub(super) err: Option<ExpandError>,
pub(super) err_count: usize,
@@ -102,7 +102,19 @@ pub(super) struct Match {
pub(super) bound_count: usize,
}
-impl Match {
+impl<S> Default for Match<S> {
+ fn default() -> Self {
+ Self {
+ bindings: Default::default(),
+ err: Default::default(),
+ err_count: Default::default(),
+ unmatched_tts: Default::default(),
+ bound_count: Default::default(),
+ }
+ }
+}
+
+impl<S> Match<S> {
fn add_err(&mut self, err: ExpandError) {
let prev_err = self.err.take();
self.err = prev_err.or(Some(err));
@@ -111,12 +123,16 @@ impl Match {
}
/// Matching errors are added to the `Match`.
-pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) -> Match {
+pub(super) fn match_<S: Span>(
+ pattern: &MetaTemplate<S>,
+ input: &tt::Subtree<S>,
+ is_2021: bool,
+) -> Match<S> {
let mut res = match_loop(pattern, input, is_2021);
res.bound_count = count(res.bindings.bindings());
return res;
- fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
+ fn count<'a, S: 'a>(bindings: impl Iterator<Item = &'a Binding<S>>) -> usize {
bindings
.map(|it| match it {
Binding::Fragment(_) => 1,
@@ -129,10 +145,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool)
}
#[derive(Debug, Clone)]
-enum BindingKind {
+enum BindingKind<S> {
Empty(SmolStr),
Optional(SmolStr),
- Fragment(SmolStr, Fragment),
+ Fragment(SmolStr, Fragment<S>),
Missing(SmolStr, MetaVarKind),
Nested(usize, usize),
}
@@ -146,13 +162,18 @@ enum LinkNode<T> {
Parent { idx: usize, len: usize },
}
-#[derive(Default)]
-struct BindingsBuilder {
- nodes: Vec<Vec<LinkNode<Rc<BindingKind>>>>,
+struct BindingsBuilder<S> {
+ nodes: Vec<Vec<LinkNode<Rc<BindingKind<S>>>>>,
nested: Vec<Vec<LinkNode<usize>>>,
}
-impl BindingsBuilder {
+impl<S> Default for BindingsBuilder<S> {
+ fn default() -> Self {
+ Self { nodes: Default::default(), nested: Default::default() }
+ }
+}
+
+impl<S: Span> BindingsBuilder<S> {
fn alloc(&mut self) -> BindingsIdx {
let idx = self.nodes.len();
self.nodes.push(Vec::new());
@@ -189,7 +210,7 @@ impl BindingsBuilder {
self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
}
- fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) {
+ fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment<S>) {
self.nodes[idx.0]
.push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
}
@@ -210,11 +231,11 @@ impl BindingsBuilder {
idx.0 = new_idx;
}
- fn build(self, idx: &BindingsIdx) -> Bindings {
+ fn build(self, idx: &BindingsIdx) -> Bindings<S> {
self.build_inner(&self.nodes[idx.0])
}
- fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind>>]) -> Bindings {
+ fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind<S>>>]) -> Bindings<S> {
let mut bindings = Bindings::default();
let mut nodes = Vec::new();
self.collect_nodes(link_nodes, &mut nodes);
@@ -264,7 +285,7 @@ impl BindingsBuilder {
&'a self,
id: usize,
len: usize,
- nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind>>]>,
+ nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind<S>>>]>,
) {
self.nested[id].iter().take(len).for_each(|it| match it {
LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),
@@ -272,7 +293,7 @@ impl BindingsBuilder {
});
}
- fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings>) {
+ fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings<S>>) {
let last = &self.nodes[idx];
let mut nested_refs: Vec<&[_]> = Vec::new();
self.nested[nested_idx].iter().for_each(|it| match *it {
@@ -283,7 +304,7 @@ impl BindingsBuilder {
nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter)));
}
- fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) {
+ fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind<S>>) {
self.nodes[id].iter().take(len).for_each(|it| match it {
LinkNode::Node(it) => nodes.push(it),
LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
@@ -292,8 +313,8 @@ impl BindingsBuilder {
fn collect_nodes<'a>(
&'a self,
- link_nodes: &'a [LinkNode<Rc<BindingKind>>],
- nodes: &mut Vec<&'a BindingKind>,
+ link_nodes: &'a [LinkNode<Rc<BindingKind<S>>>],
+ nodes: &mut Vec<&'a BindingKind<S>>,
) {
link_nodes.iter().for_each(|it| match it {
LinkNode::Node(it) => nodes.push(it),
@@ -303,22 +324,22 @@ impl BindingsBuilder {
}
#[derive(Debug, Clone)]
-struct MatchState<'t> {
+struct MatchState<'t, S> {
/// The position of the "dot" in this matcher
- dot: OpDelimitedIter<'t>,
+ dot: OpDelimitedIter<'t, S>,
/// Token subtree stack
/// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
/// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
/// that where the bottom of the stack is the outermost matcher.
- stack: SmallVec<[OpDelimitedIter<'t>; 4]>,
+ stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>,
/// The "parent" matcher position if we are in a repetition. That is, the matcher position just
/// before we enter the repetition.
- up: Option<Box<MatchState<'t>>>,
+ up: Option<Box<MatchState<'t, S>>>,
/// The separator if we are in a repetition.
- sep: Option<Separator>,
+ sep: Option<Separator<S>>,
/// The KleeneOp of this sequence if we are in a repetition.
sep_kind: Option<RepeatKind>,
@@ -330,7 +351,7 @@ struct MatchState<'t> {
bindings: BindingsIdx,
/// Cached result of meta variable parsing
- meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
+ meta_result: Option<(TtIter<'t, S>, ExpandResult<Option<Fragment<S>>>)>,
/// Is error occurred in this state, will `poised` to "parent"
is_error: bool,
@@ -355,16 +376,16 @@ struct MatchState<'t> {
/// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `error_items`: the set of items in errors, used for error-resilient parsing
#[inline]
-fn match_loop_inner<'t>(
- src: TtIter<'t>,
- stack: &[TtIter<'t>],
- res: &mut Match,
- bindings_builder: &mut BindingsBuilder,
- cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
- bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
- next_items: &mut Vec<MatchState<'t>>,
- eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
- error_items: &mut SmallVec<[MatchState<'t>; 1]>,
+fn match_loop_inner<'t, S: Span>(
+ src: TtIter<'t, S>,
+ stack: &[TtIter<'t, S>],
+ res: &mut Match<S>,
+ bindings_builder: &mut BindingsBuilder<S>,
+ cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ next_items: &mut Vec<MatchState<'t, S>>,
+ eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+ error_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
is_2021: bool,
) {
macro_rules! try_push {
@@ -468,7 +489,7 @@ fn match_loop_inner<'t>(
if let Ok(subtree) = src.clone().expect_subtree() {
if subtree.delimiter.kind == delimiter.kind {
item.stack.push(item.dot);
- item.dot = tokens.iter_delimited(Some(delimiter));
+ item.dot = tokens.iter_delimited(Some(*delimiter));
cur_items.push(item);
}
}
@@ -587,9 +608,9 @@ fn match_loop_inner<'t>(
}
}
-fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match {
+fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> {
let mut src = TtIter::new(src);
- let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
+ let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new();
let mut res = Match::default();
let mut error_recover_item = None;
@@ -736,16 +757,16 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match
}
}
-fn match_meta_var(
+fn match_meta_var<S: Span>(
kind: MetaVarKind,
- input: &mut TtIter<'_>,
+ input: &mut TtIter<'_, S>,
is_2021: bool,
-) -> ExpandResult<Option<Fragment>> {
+) -> ExpandResult<Option<Fragment<S>>> {
let fragment = match kind {
MetaVarKind::Path => {
return input
.expect_fragment(parser::PrefixEntryPoint::Path)
- .map(|it| it.map(Fragment::Path));
+ .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path));
}
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
@@ -771,9 +792,21 @@ fn match_meta_var(
}
_ => {}
};
- return input
- .expect_fragment(parser::PrefixEntryPoint::Expr)
- .map(|tt| tt.map(Fragment::Expr));
+ return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
+ tt.map(|tt| match tt {
+ tt::TokenTree::Leaf(leaf) => tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![leaf.into()],
+ },
+ tt::TokenTree::Subtree(mut s) => {
+ if s.delimiter.kind == tt::DelimiterKind::Invisible {
+ s.delimiter.kind = tt::DelimiterKind::Parenthesis;
+ }
+ s
+ }
+ })
+ .map(Fragment::Expr)
+ });
}
MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
let tt_result = match kind {
@@ -796,7 +829,7 @@ fn match_meta_var(
match neg {
None => lit.into(),
Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![neg, lit.into()],
}),
}
@@ -811,7 +844,7 @@ fn match_meta_var(
input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
}
-fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
+fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate<S>) {
for op in pattern.iter() {
match op {
Op::Var { name, .. } => collector_fun(name.clone()),
@@ -824,38 +857,38 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate)
}
}
}
-impl MetaTemplate {
- fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
+impl<S: Span> MetaTemplate<S> {
+ fn iter_delimited(&self, delimited: Option<tt::Delimiter<S>>) -> OpDelimitedIter<'_, S> {
OpDelimitedIter {
inner: &self.0,
idx: 0,
- delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED),
+ delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE),
}
}
}
#[derive(Debug, Clone, Copy)]
-enum OpDelimited<'a> {
- Op(&'a Op),
+enum OpDelimited<'a, S> {
+ Op(&'a Op<S>),
Open,
Close,
}
#[derive(Debug, Clone, Copy)]
-struct OpDelimitedIter<'a> {
- inner: &'a [Op],
- delimited: &'a tt::Delimiter,
+struct OpDelimitedIter<'a, S> {
+ inner: &'a [Op<S>],
+ delimited: tt::Delimiter<S>,
idx: usize,
}
-impl<'a> OpDelimitedIter<'a> {
+impl<'a, S: Span> OpDelimitedIter<'a, S> {
fn is_eof(&self) -> bool {
let len = self.inner.len()
+ if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 };
self.idx >= len
}
- fn peek(&self) -> Option<OpDelimited<'a>> {
+ fn peek(&self) -> Option<OpDelimited<'a, S>> {
match self.delimited.kind {
tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op),
_ => match self.idx {
@@ -871,8 +904,8 @@ impl<'a> OpDelimitedIter<'a> {
}
}
-impl<'a> Iterator for OpDelimitedIter<'a> {
- type Item = OpDelimited<'a>;
+impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> {
+ type Item = OpDelimited<'a, S>;
fn next(&mut self) -> Option<Self::Item> {
let res = self.peek();
@@ -888,8 +921,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
-impl TtIter<'_> {
- fn expect_separator(&mut self, separator: &Separator) -> bool {
+impl<S: Span> TtIter<'_, S> {
+ fn expect_separator(&mut self, separator: &Separator<S>) -> bool {
let mut fork = self.clone();
let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
@@ -919,7 +952,7 @@ impl TtIter<'_> {
ok
}
- fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
+ fn expect_tt(&mut self) -> Result<tt::TokenTree<S>, ()> {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) {
if punct.char == '\'' {
self.expect_lifetime()
@@ -927,7 +960,7 @@ impl TtIter<'_> {
let puncts = self.expect_glued_punct()?;
let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
Ok(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees,
}))
}
@@ -936,7 +969,7 @@ impl TtIter<'_> {
}
}
- fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
+ fn expect_lifetime(&mut self) -> Result<tt::TokenTree<S>, ()> {
let punct = self.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
@@ -944,7 +977,7 @@ impl TtIter<'_> {
let ident = self.expect_ident_or_underscore()?;
Ok(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![
tt::Leaf::Punct(*punct).into(),
tt::Leaf::Ident(ident.clone()).into(),
@@ -953,7 +986,7 @@ impl TtIter<'_> {
.into())
}
- fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
+ fn eat_char(&mut self, c: char) -> Option<tt::TokenTree<S>> {
let mut fork = self.clone();
match fork.expect_char(c) {
Ok(_) => {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
index cdac2f1e3..7a3e8653c 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -2,31 +2,29 @@
//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
use syntax::SmolStr;
+use tt::{Delimiter, Span};
use crate::{
expander::{Binding, Bindings, Fragment},
parser::{MetaVarKind, Op, RepeatKind, Separator},
- tt::{self, Delimiter},
CountError, ExpandError, ExpandResult, MetaTemplate,
};
-impl Bindings {
- fn contains(&self, name: &str) -> bool {
- self.inner.contains_key(name)
- }
-
- fn get(&self, name: &str) -> Result<&Binding, ExpandError> {
+impl<S: Span> Bindings<S> {
+ fn get(&self, name: &str) -> Result<&Binding<S>, ExpandError> {
match self.inner.get(name) {
Some(binding) => Ok(binding),
- None => Err(ExpandError::binding_error(format!("could not find binding `{name}`"))),
+ None => Err(ExpandError::UnresolvedBinding(Box::new(Box::from(name)))),
}
}
fn get_fragment(
&self,
name: &str,
+ mut span: S,
nesting: &mut [NestingState],
- ) -> Result<Fragment, ExpandError> {
+ marker: impl Fn(&mut S),
+ ) -> Result<Fragment<S>, ExpandError> {
macro_rules! binding_err {
($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
}
@@ -48,54 +46,75 @@ impl Bindings {
};
}
match b {
- Binding::Fragment(it) => Ok(it.clone()),
- // emit some reasonable default expansion for missing bindings,
- // this gives better recovery than emitting the `$fragment-name` verbatim
- Binding::Missing(it) => Ok(match it {
- MetaVarKind::Stmt => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
- span: tt::TokenId::unspecified(),
- char: ';',
- spacing: tt::Spacing::Alone,
- })))
- }
- MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ Binding::Fragment(f @ (Fragment::Path(sub) | Fragment::Expr(sub))) => {
+ let tt::Subtree { delimiter, token_trees } = sub;
+ marker(&mut span);
+ let subtree = tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
- kind: tt::DelimiterKind::Brace,
+ // FIXME split span
+ open: span,
+ close: span,
+ kind: delimiter.kind,
},
- token_trees: vec![],
- })),
- // FIXME: Meta and Item should get proper defaults
- MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
- Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: token_trees.clone(),
+ };
+ Ok(match f {
+ Fragment::Tokens(_) => unreachable!(),
+ Fragment::Expr(_) => Fragment::Expr,
+ Fragment::Path(_) => Fragment::Path,
+ }(subtree))
+ }
+ Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()),
+ // emit some reasonable default expansion for missing bindings,
+ // this gives better recovery than emitting the `$fragment-name` verbatim
+ Binding::Missing(it) => Ok({
+ marker(&mut span);
+ match it {
+ MetaVarKind::Stmt => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ span,
+ char: ';',
+ spacing: tt::Spacing::Alone,
+ })))
+ }
+ MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: tt::Delimiter {
+ open: span,
+ close: span,
+ kind: tt::DelimiterKind::Brace,
+ },
token_trees: vec![],
- }))
- }
- MetaVarKind::Path
- | MetaVarKind::Ty
- | MetaVarKind::Pat
- | MetaVarKind::PatParam
- | MetaVarKind::Expr
- | MetaVarKind::Ident => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("missing"),
- span: tt::TokenId::unspecified(),
- })))
- }
- MetaVarKind::Lifetime => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("'missing"),
- span: tt::TokenId::unspecified(),
- })))
- }
- MetaVarKind::Literal => {
- Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- text: SmolStr::new_inline("\"missing\""),
- span: tt::TokenId::unspecified(),
- })))
+ })),
+ // FIXME: Meta and Item should get proper defaults
+ MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
+ Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: vec![],
+ }))
+ }
+ MetaVarKind::Path
+ | MetaVarKind::Ty
+ | MetaVarKind::Pat
+ | MetaVarKind::PatParam
+ | MetaVarKind::Expr
+ | MetaVarKind::Ident => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("missing"),
+ span,
+ })))
+ }
+ MetaVarKind::Lifetime => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("'missing"),
+ span,
+ })))
+ }
+ MetaVarKind::Literal => {
+ Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+ text: SmolStr::new_inline("\"missing\""),
+ span,
+ })))
+ }
}
}),
Binding::Nested(_) => {
@@ -108,13 +127,14 @@ impl Bindings {
}
}
-pub(super) fn transcribe(
- template: &MetaTemplate,
- bindings: &Bindings,
-) -> ExpandResult<tt::Subtree> {
+pub(super) fn transcribe<S: Span>(
+ template: &MetaTemplate<S>,
+ bindings: &Bindings<S>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<tt::Subtree<S>> {
let mut ctx = ExpandCtx { bindings, nesting: Vec::new() };
- let mut arena: Vec<tt::TokenTree> = Vec::new();
- expand_subtree(&mut ctx, template, None, &mut arena)
+ let mut arena: Vec<tt::TokenTree<S>> = Vec::new();
+ expand_subtree(&mut ctx, template, None, &mut arena, marker)
}
#[derive(Debug)]
@@ -129,50 +149,75 @@ struct NestingState {
}
#[derive(Debug)]
-struct ExpandCtx<'a> {
- bindings: &'a Bindings,
+struct ExpandCtx<'a, S> {
+ bindings: &'a Bindings<S>,
nesting: Vec<NestingState>,
}
-fn expand_subtree(
- ctx: &mut ExpandCtx<'_>,
- template: &MetaTemplate,
- delimiter: Option<Delimiter>,
- arena: &mut Vec<tt::TokenTree>,
-) -> ExpandResult<tt::Subtree> {
+fn expand_subtree<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ template: &MetaTemplate<S>,
+ delimiter: Option<Delimiter<S>>,
+ arena: &mut Vec<tt::TokenTree<S>>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<tt::Subtree<S>> {
// remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
let start_elements = arena.len();
let mut err = None;
'ops: for op in template.iter() {
match op {
- Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()),
- Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()),
+ Op::Literal(it) => arena.push(
+ tt::Leaf::from({
+ let mut it = it.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ ),
+ Op::Ident(it) => arena.push(
+ tt::Leaf::from({
+ let mut it = it.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ ),
Op::Punct(puncts) => {
for punct in puncts {
- arena.push(tt::Leaf::from(*punct).into());
+ arena.push(
+ tt::Leaf::from({
+ let mut it = punct.clone();
+ marker(&mut it.span);
+ it
+ })
+ .into(),
+ );
}
}
Op::Subtree { tokens, delimiter } => {
+ let mut delimiter = *delimiter;
+ marker(&mut delimiter.open);
+ marker(&mut delimiter.close);
let ExpandResult { value: tt, err: e } =
- expand_subtree(ctx, tokens, Some(*delimiter), arena);
+ expand_subtree(ctx, tokens, Some(delimiter), arena, marker);
err = err.or(e);
arena.push(tt.into());
}
Op::Var { name, id, .. } => {
- let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
+ let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker);
err = err.or(e);
push_fragment(arena, fragment);
}
Op::Repeat { tokens: subtree, kind, separator } => {
let ExpandResult { value: fragment, err: e } =
- expand_repeat(ctx, subtree, *kind, separator, arena);
+ expand_repeat(ctx, subtree, *kind, separator, arena, marker);
err = err.or(e);
push_fragment(arena, fragment)
}
Op::Ignore { name, id } => {
// Expand the variable, but ignore the result. This registers the repetition count.
// FIXME: Any emitted errors are dropped.
- expand_var(ctx, name, *id);
+ expand_var(ctx, name, *id, marker);
}
Op::Index { depth } => {
let index =
@@ -180,7 +225,8 @@ fn expand_subtree(
arena.push(
tt::Leaf::Literal(tt::Literal {
text: index.to_string().into(),
- span: tt::TokenId::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -239,7 +285,8 @@ fn expand_subtree(
arena.push(
tt::Leaf::Literal(tt::Literal {
text: c.to_string().into(),
- span: tt::TokenId::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -250,60 +297,70 @@ fn expand_subtree(
let tts = arena.drain(start_elements..).collect();
ExpandResult {
value: tt::Subtree {
- delimiter: delimiter.unwrap_or_else(tt::Delimiter::unspecified),
+ delimiter: delimiter.unwrap_or_else(tt::Delimiter::dummy_invisible),
token_trees: tts,
},
err,
}
}
-fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> {
+fn expand_var<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ v: &SmolStr,
+ id: S,
+ marker: impl Fn(&mut S),
+) -> ExpandResult<Fragment<S>> {
// We already handle $crate case in mbe parser
debug_assert!(v != "crate");
- if !ctx.bindings.contains(v) {
- // Note that it is possible to have a `$var` inside a macro which is not bound.
- // For example:
- // ```
- // macro_rules! foo {
- // ($a:ident, $b:ident, $c:tt) => {
- // macro_rules! bar {
- // ($bi:ident) => {
- // fn $bi() -> u8 {$c}
- // }
- // }
- // }
- // ```
- // We just treat it a normal tokens
- let tt = tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: vec![
- tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
- .into(),
- tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
- ],
+ match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
+ Ok(it) => ExpandResult::ok(it),
+ Err(ExpandError::UnresolvedBinding(_)) => {
+ // Note that it is possible to have a `$var` inside a macro which is not bound.
+ // For example:
+ // ```
+ // macro_rules! foo {
+ // ($a:ident, $b:ident, $c:tt) => {
+ // macro_rules! bar {
+ // ($bi:ident) => {
+ // fn $bi() -> u8 {$c}
+ // }
+ // }
+ // }
+ // ```
+ // We just treat it a normal tokens
+ let tt = tt::Subtree {
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+ token_trees: vec![
+ tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
+ .into(),
+ tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
+ ],
+ }
+ .into();
+ ExpandResult::ok(Fragment::Tokens(tt))
}
- .into();
- ExpandResult::ok(Fragment::Tokens(tt))
- } else {
- ctx.bindings.get_fragment(v, &mut ctx.nesting).map_or_else(
- |e| ExpandResult {
- value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty())),
- err: Some(e),
- },
- ExpandResult::ok,
- )
+ Err(e) => ExpandResult {
+ value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
+ // FIXME
+ open: S::DUMMY,
+ // FIXME
+ close: S::DUMMY,
+ }))),
+ err: Some(e),
+ },
}
}
-fn expand_repeat(
- ctx: &mut ExpandCtx<'_>,
- template: &MetaTemplate,
+fn expand_repeat<S: Span>(
+ ctx: &mut ExpandCtx<'_, S>,
+ template: &MetaTemplate<S>,
kind: RepeatKind,
- separator: &Option<Separator>,
- arena: &mut Vec<tt::TokenTree>,
-) -> ExpandResult<Fragment> {
- let mut buf: Vec<tt::TokenTree> = Vec::new();
+ separator: &Option<Separator<S>>,
+ arena: &mut Vec<tt::TokenTree<S>>,
+ marker: impl Fn(&mut S) + Copy,
+) -> ExpandResult<Fragment<S>> {
+ let mut buf: Vec<tt::TokenTree<S>> = Vec::new();
ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
// Dirty hack to make macro-expansion terminate.
// This should be replaced by a proper macro-by-example implementation
@@ -313,7 +370,8 @@ fn expand_repeat(
let mut err = None;
loop {
- let ExpandResult { value: mut t, err: e } = expand_subtree(ctx, template, None, arena);
+ let ExpandResult { value: mut t, err: e } =
+ expand_subtree(ctx, template, None, arena, marker);
let nesting_state = ctx.nesting.last_mut().unwrap();
if nesting_state.at_end || !nesting_state.hit {
break;
@@ -330,8 +388,11 @@ fn expand_repeat(
);
return ExpandResult {
value: Fragment::Tokens(
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }
- .into(),
+ tt::Subtree {
+ delimiter: tt::Delimiter::dummy_invisible(),
+ token_trees: vec![],
+ }
+ .into(),
),
err: Some(ExpandError::LimitExceeded),
};
@@ -342,7 +403,7 @@ fn expand_repeat(
continue;
}
- t.delimiter = tt::Delimiter::unspecified();
+ t.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
push_subtree(&mut buf, t);
if let Some(sep) = separator {
@@ -376,7 +437,7 @@ fn expand_repeat(
// Check if it is a single token subtree without any delimiter
// e.g {Delimiter:None> ['>'] /Delimiter:None>}
- let tt = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: buf }.into();
+ let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into();
if RepeatKind::OneOrMore == kind && counter == 0 {
return ExpandResult {
@@ -387,25 +448,18 @@ fn expand_repeat(
ExpandResult { value: Fragment::Tokens(tt), err }
}
-fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
+fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>) {
match fragment {
Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
- Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => {
- if tt.delimiter.kind == tt::DelimiterKind::Invisible {
- tt.delimiter = tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
- kind: tt::DelimiterKind::Parenthesis,
- };
- }
- buf.push(tt.into())
+ Fragment::Expr(sub) => {
+ push_subtree(buf, sub);
}
- Fragment::Path(tt::TokenTree::Subtree(tt)) => fix_up_and_push_path_tt(buf, tt),
- Fragment::Tokens(tt) | Fragment::Expr(tt) | Fragment::Path(tt) => buf.push(tt),
+ Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt),
+ Fragment::Tokens(tt) => buf.push(tt),
}
}
-fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
+fn push_subtree<S>(buf: &mut Vec<tt::TokenTree<S>>, tt: tt::Subtree<S>) {
match tt.delimiter.kind {
tt::DelimiterKind::Invisible => buf.extend(tt.token_trees),
_ => buf.push(tt.into()),
@@ -415,7 +469,7 @@ fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
/// Inserts the path separator `::` between an identifier and its following generic
/// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why
/// we need this fixup.
-fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
+fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt::Subtree<S>) {
stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
let mut prev_was_ident = false;
// Note that we only need to fix up the top-level `TokenTree`s because the
@@ -432,7 +486,8 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
tt::Leaf::Punct(tt::Punct {
char: ':',
spacing: tt::Spacing::Joint,
- span: tt::Span::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -440,7 +495,8 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
tt::Leaf::Punct(tt::Punct {
char: ':',
spacing: tt::Spacing::Alone,
- span: tt::Span::unspecified(),
+ // FIXME
+ span: S::DUMMY,
})
.into(),
);
@@ -453,9 +509,9 @@ fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
/// defined by the metavar expression.
-fn count(
- ctx: &ExpandCtx<'_>,
- binding: &Binding,
+fn count<S>(
+ ctx: &ExpandCtx<'_, S>,
+ binding: &Binding<S>,
our_depth: usize,
count_depth: Option<usize>,
) -> Result<usize, CountError> {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index 9d886a1c9..933179858 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -3,10 +3,10 @@
//! interface, although it contains some code to bridge `SyntaxNode`s and
//! `TokenTree`s as well!
//!
-//! The tes for this functionality live in another crate:
+//! The tests for this functionality live in another crate:
//! `hir_def::macro_expansion_tests::mbe`.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod parser;
mod expander;
@@ -18,8 +18,8 @@ mod to_parser_input;
mod benchmark;
mod token_map;
-use ::tt::token_id as tt;
use stdx::impl_from;
+use tt::Span;
use std::fmt;
@@ -28,19 +28,21 @@ use crate::{
tt_iter::TtIter,
};
-pub use self::tt::{Delimiter, DelimiterKind, Punct};
+// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
pub use ::parser::TopEntryPoint;
+pub use tt::{Delimiter, DelimiterKind, Punct, SyntaxContext};
pub use crate::{
syntax_bridge::{
- parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map,
- syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree,
- syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
- SyntheticTokenId,
+ parse_exprs_with_sep, parse_to_token_tree, parse_to_token_tree_static_span,
+ syntax_node_to_token_tree, syntax_node_to_token_tree_modified, token_tree_to_syntax_node,
+ SpanMapper,
},
- token_map::TokenMap,
+ token_map::SpanMap,
};
+pub use crate::syntax_bridge::dummy_test_span_utils::*;
+
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ParseError {
UnexpectedToken(Box<str>),
@@ -73,6 +75,7 @@ impl fmt::Display for ParseError {
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum ExpandError {
BindingError(Box<Box<str>>),
+ UnresolvedBinding(Box<Box<str>>),
LeftoverTokens,
ConversionError,
LimitExceeded,
@@ -95,6 +98,10 @@ impl fmt::Display for ExpandError {
ExpandError::NoMatchingRule => f.write_str("no rule matches input tokens"),
ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
ExpandError::BindingError(e) => f.write_str(e),
+ ExpandError::UnresolvedBinding(binding) => {
+ f.write_str("could not find binding ")?;
+ f.write_str(binding)
+ }
ExpandError::ConversionError => f.write_str("could not convert tokens"),
ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
@@ -124,10 +131,8 @@ impl fmt::Display for CountError {
/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct DeclarativeMacro {
- rules: Box<[Rule]>,
- /// Highest id of the token we have in TokenMap
- shift: Shift,
+pub struct DeclarativeMacro<S> {
+ rules: Box<[Rule<S>]>,
// This is used for correctly determining the behavior of the pat fragment
// FIXME: This should be tracked by hygiene of the fragment identifier!
is_2021: bool,
@@ -135,96 +140,18 @@ pub struct DeclarativeMacro {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-struct Rule {
- lhs: MetaTemplate,
- rhs: MetaTemplate,
+struct Rule<S> {
+ lhs: MetaTemplate<S>,
+ rhs: MetaTemplate<S>,
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Shift(u32);
-
-impl Shift {
- pub fn new(tt: &tt::Subtree) -> Shift {
- // Note that TokenId is started from zero,
- // We have to add 1 to prevent duplication.
- let value = max_id(tt).map_or(0, |it| it + 1);
- return Shift(value);
-
- // Find the max token id inside a subtree
- fn max_id(subtree: &tt::Subtree) -> Option<u32> {
- let filter =
- |tt: &_| match tt {
- tt::TokenTree::Subtree(subtree) => {
- let tree_id = max_id(subtree);
- if subtree.delimiter.open != tt::TokenId::unspecified() {
- Some(tree_id.map_or(subtree.delimiter.open.0, |t| {
- t.max(subtree.delimiter.open.0)
- }))
- } else {
- tree_id
- }
- }
- tt::TokenTree::Leaf(leaf) => {
- let &(tt::Leaf::Ident(tt::Ident { span, .. })
- | tt::Leaf::Punct(tt::Punct { span, .. })
- | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf;
-
- (span != tt::TokenId::unspecified()).then_some(span.0)
- }
- };
- subtree.token_trees.iter().filter_map(filter).max()
- }
- }
-
- /// Shift given TokenTree token id
- pub fn shift_all(self, tt: &mut tt::Subtree) {
- for t in &mut tt.token_trees {
- match t {
- tt::TokenTree::Leaf(
- tt::Leaf::Ident(tt::Ident { span, .. })
- | tt::Leaf::Punct(tt::Punct { span, .. })
- | tt::Leaf::Literal(tt::Literal { span, .. }),
- ) => *span = self.shift(*span),
- tt::TokenTree::Subtree(tt) => {
- tt.delimiter.open = self.shift(tt.delimiter.open);
- tt.delimiter.close = self.shift(tt.delimiter.close);
- self.shift_all(tt)
- }
- }
- }
- }
-
- pub fn shift(self, id: tt::TokenId) -> tt::TokenId {
- if id == tt::TokenId::unspecified() {
- id
- } else {
- tt::TokenId(id.0 + self.0)
- }
- }
-
- pub fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
- id.0.checked_sub(self.0).map(tt::TokenId)
- }
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Origin {
- Def,
- Call,
-}
-
-impl DeclarativeMacro {
- pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro {
- DeclarativeMacro {
- rules: Box::default(),
- shift: Shift(0),
- is_2021,
- err: Some(Box::new(err)),
- }
+impl<S: Span> DeclarativeMacro<S> {
+ pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro<S> {
+ DeclarativeMacro { rules: Box::default(), is_2021, err: Some(Box::new(err)) }
}
/// The old, `macro_rules! m {}` flavor.
- pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
+ pub fn parse_macro_rules(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
// Note: this parsing can be implemented using mbe machinery itself, by
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
// manually seems easier.
@@ -256,11 +183,11 @@ impl DeclarativeMacro {
}
}
- DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
+ DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err }
}
/// The new, unstable `macro m {}` flavor.
- pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
+ pub fn parse_macro2(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
let mut err = None;
@@ -307,36 +234,24 @@ impl DeclarativeMacro {
}
}
- DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
- }
-
- pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
- self.shift.shift_all(&mut tt);
- expander::expand_rules(&self.rules, &tt, self.is_2021)
+ DeclarativeMacro { rules: rules.into_boxed_slice(), is_2021, err }
}
pub fn err(&self) -> Option<&ParseError> {
self.err.as_deref()
}
- pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
- self.shift.shift(id)
- }
-
- pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
- match self.shift.unshift(id) {
- Some(id) => (id, Origin::Call),
- None => (id, Origin::Def),
- }
- }
-
- pub fn shift(&self) -> Shift {
- self.shift
+ pub fn expand(
+ &self,
+ tt: &tt::Subtree<S>,
+ marker: impl Fn(&mut S) + Copy,
+ ) -> ExpandResult<tt::Subtree<S>> {
+ expander::expand_rules(&self.rules, &tt, marker, self.is_2021)
}
}
-impl Rule {
- fn parse(src: &mut TtIter<'_>, expect_arrow: bool) -> Result<Self, ParseError> {
+impl<S: Span> Rule<S> {
+ fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result<Self, ParseError> {
let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
if expect_arrow {
src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
@@ -351,7 +266,7 @@ impl Rule {
}
}
-fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
+fn validate<S: Span>(pattern: &MetaTemplate<S>) -> Result<(), ParseError> {
for op in pattern.iter() {
match op {
Op::Subtree { tokens, .. } => validate(tokens)?,
diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
index 7a143e746..00ba35377 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
@@ -3,8 +3,9 @@
use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
+use tt::Span;
-use crate::{tt, tt_iter::TtIter, ParseError};
+use crate::{tt_iter::TtIter, ParseError};
/// Consider
///
@@ -20,22 +21,22 @@ use crate::{tt, tt_iter::TtIter, ParseError};
/// Stuff to the right is a [`MetaTemplate`] template which is used to produce
/// output.
#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct MetaTemplate(pub(crate) Box<[Op]>);
+pub(crate) struct MetaTemplate<S>(pub(crate) Box<[Op<S>]>);
-impl MetaTemplate {
- pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+impl<S: Span> MetaTemplate<S> {
+ pub(crate) fn parse_pattern(pattern: &tt::Subtree<S>) -> Result<Self, ParseError> {
MetaTemplate::parse(pattern, Mode::Pattern)
}
- pub(crate) fn parse_template(template: &tt::Subtree) -> Result<MetaTemplate, ParseError> {
+ pub(crate) fn parse_template(template: &tt::Subtree<S>) -> Result<Self, ParseError> {
MetaTemplate::parse(template, Mode::Template)
}
- pub(crate) fn iter(&self) -> impl Iterator<Item = &Op> {
+ pub(crate) fn iter(&self) -> impl Iterator<Item = &Op<S>> {
self.0.iter()
}
- fn parse(tt: &tt::Subtree, mode: Mode) -> Result<MetaTemplate, ParseError> {
+ fn parse(tt: &tt::Subtree<S>, mode: Mode) -> Result<Self, ParseError> {
let mut src = TtIter::new(tt);
let mut res = Vec::new();
@@ -49,16 +50,16 @@ impl MetaTemplate {
}
#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) enum Op {
- Var { name: SmolStr, kind: Option<MetaVarKind>, id: tt::TokenId },
- Ignore { name: SmolStr, id: tt::TokenId },
+pub(crate) enum Op<S> {
+ Var { name: SmolStr, kind: Option<MetaVarKind>, id: S },
+ Ignore { name: SmolStr, id: S },
Index { depth: usize },
Count { name: SmolStr, depth: Option<usize> },
- Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
- Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter },
- Literal(tt::Literal),
- Punct(SmallVec<[tt::Punct; 3]>),
- Ident(tt::Ident),
+ Repeat { tokens: MetaTemplate<S>, kind: RepeatKind, separator: Option<Separator<S>> },
+ Subtree { tokens: MetaTemplate<S>, delimiter: tt::Delimiter<S> },
+ Literal(tt::Literal<S>),
+ Punct(SmallVec<[tt::Punct<S>; 3]>),
+ Ident(tt::Ident<S>),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -87,15 +88,15 @@ pub(crate) enum MetaVarKind {
}
#[derive(Clone, Debug, Eq)]
-pub(crate) enum Separator {
- Literal(tt::Literal),
- Ident(tt::Ident),
- Puncts(SmallVec<[tt::Punct; 3]>),
+pub(crate) enum Separator<S> {
+ Literal(tt::Literal<S>),
+ Ident(tt::Ident<S>),
+ Puncts(SmallVec<[tt::Punct<S>; 3]>),
}
// Note that when we compare a Separator, we just care about its textual value.
-impl PartialEq for Separator {
- fn eq(&self, other: &Separator) -> bool {
+impl<S> PartialEq for Separator<S> {
+ fn eq(&self, other: &Separator<S>) -> bool {
use Separator::*;
match (self, other) {
@@ -117,11 +118,11 @@ enum Mode {
Template,
}
-fn next_op(
- first_peeked: &tt::TokenTree,
- src: &mut TtIter<'_>,
+fn next_op<S: Span>(
+ first_peeked: &tt::TokenTree<S>,
+ src: &mut TtIter<'_, S>,
mode: Mode,
-) -> Result<Op, ParseError> {
+) -> Result<Op<S>, ParseError> {
let res = match first_peeked {
tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => {
src.next().expect("first token already peeked");
@@ -212,7 +213,10 @@ fn next_op(
Ok(res)
}
-fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<MetaVarKind>, ParseError> {
+fn eat_fragment_kind<S: Span>(
+ src: &mut TtIter<'_, S>,
+ mode: Mode,
+) -> Result<Option<MetaVarKind>, ParseError> {
if let Mode::Pattern = mode {
src.expect_char(':').map_err(|()| ParseError::unexpected("missing fragment specifier"))?;
let ident = src
@@ -240,11 +244,13 @@ fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result<Option<MetaVarK
Ok(None)
}
-fn is_boolean_literal(lit: &tt::Literal) -> bool {
+fn is_boolean_literal<S>(lit: &tt::Literal<S>) -> bool {
matches!(lit.text.as_str(), "true" | "false")
}
-fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind), ParseError> {
+fn parse_repeat<S: Span>(
+ src: &mut TtIter<'_, S>,
+) -> Result<(Option<Separator<S>>, RepeatKind), ParseError> {
let mut separator = Separator::Puncts(SmallVec::new());
for tt in src {
let tt = match tt {
@@ -281,7 +287,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option<Separator>, RepeatKind),
Err(ParseError::InvalidRepeat)
}
-fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
+fn parse_metavar_expr<S: Span>(src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> {
let func = src.expect_ident()?;
let args = src.expect_subtree()?;
@@ -314,7 +320,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
Ok(op)
}
-fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
+fn parse_depth<S: Span>(src: &mut TtIter<'_, S>) -> Result<usize, ()> {
if src.len() == 0 {
Ok(0)
} else if let tt::Leaf::Literal(lit) = src.expect_literal()? {
@@ -325,7 +331,7 @@ fn parse_depth(src: &mut TtIter<'_>) -> Result<usize, ()> {
}
}
-fn try_eat_comma(src: &mut TtIter<'_>) -> bool {
+fn try_eat_comma<S: Span>(src: &mut TtIter<'_, S>) -> bool {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))) = src.peek_n(0) {
let _ = src.next();
return true;
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
index 7b9bb61e6..b89bfd74a 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -1,98 +1,102 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
-use rustc_hash::FxHashMap;
-use stdx::{always, non_empty_vec::NonEmptyVec};
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::{never, non_empty_vec::NonEmptyVec};
use syntax::{
ast::{self, make::tokens::doc_comment},
AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
SyntaxKind::*,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
-
-use crate::{
- to_parser_input::to_parser_input,
- tt::{
- self,
- buffer::{Cursor, TokenBuffer},
- },
- tt_iter::TtIter,
- TokenMap,
+use tt::{
+ buffer::{Cursor, TokenBuffer},
+ Span, SpanData, SyntaxContext,
};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, SpanMap};
+
#[cfg(test)]
mod tests;
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
-pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
- let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
- node,
- Default::default(),
- 0,
- Default::default(),
- Default::default(),
- );
- (subtree, token_map)
+pub trait SpanMapper<S: Span> {
+ fn span_for(&self, range: TextRange) -> S;
}
-/// Convert the syntax node to a `TokenTree` (what macro will consume)
-/// with the censored range excluded.
-pub fn syntax_node_to_token_tree_with_modifications(
- node: &SyntaxNode,
- existing_token_map: TokenMap,
- next_id: u32,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-) -> (tt::Subtree, TokenMap, u32) {
- let global_offset = node.text_range().start();
- let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
- let subtree = convert_tokens(&mut c);
- c.id_alloc.map.shrink_to_fit();
- always!(c.replace.is_empty(), "replace: {:?}", c.replace);
- always!(c.append.is_empty(), "append: {:?}", c.append);
- (subtree, c.id_alloc.map, c.id_alloc.next_id)
+impl<S: Span> SpanMapper<S> for SpanMap<S> {
+ fn span_for(&self, range: TextRange) -> S {
+ self.span_at(range.start())
+ }
}
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
-pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap {
- syntax_node_to_token_map_with_modifications(
- node,
- Default::default(),
- 0,
- Default::default(),
- Default::default(),
- )
- .0
+impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
+ fn span_for(&self, range: TextRange) -> S {
+ SM::span_for(self, range)
+ }
}
-/// Convert the syntax node to a `TokenTree` (what macro will consume)
-/// with the censored range excluded.
-pub fn syntax_node_to_token_map_with_modifications(
- node: &SyntaxNode,
- existing_token_map: TokenMap,
- next_id: u32,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
-) -> (TokenMap, u32) {
- let global_offset = node.text_range().start();
- let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
- collect_tokens(&mut c);
- c.id_alloc.map.shrink_to_fit();
- always!(c.replace.is_empty(), "replace: {:?}", c.replace);
- always!(c.append.is_empty(), "append: {:?}", c.append);
- (c.id_alloc.map, c.id_alloc.next_id)
+/// Dummy things for testing where spans don't matter.
+pub(crate) mod dummy_test_span_utils {
+ use super::*;
+
+ pub type DummyTestSpanData = tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>;
+ pub const DUMMY: DummyTestSpanData = DummyTestSpanData::DUMMY;
+
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+ pub struct DummyTestSpanAnchor;
+ impl tt::SpanAnchor for DummyTestSpanAnchor {
+ const DUMMY: Self = DummyTestSpanAnchor;
+ }
+ #[derive(Debug, Copy, Clone, PartialEq, Eq)]
+ pub struct DummyTestSyntaxContext;
+ impl SyntaxContext for DummyTestSyntaxContext {
+ const DUMMY: Self = DummyTestSyntaxContext;
+ }
+
+ pub struct DummyTestSpanMap;
+
+ impl SpanMapper<tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext>> for DummyTestSpanMap {
+ fn span_for(
+ &self,
+ range: syntax::TextRange,
+ ) -> tt::SpanData<DummyTestSpanAnchor, DummyTestSyntaxContext> {
+ tt::SpanData { range, anchor: DummyTestSpanAnchor, ctx: DummyTestSyntaxContext }
+ }
+ }
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct SyntheticTokenId(pub u32);
+/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
+/// subtree's spans.
+pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>(
+ node: &SyntaxNode,
+ map: SpanMap,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+ SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+{
+ let mut c = Converter::new(node, map, Default::default(), Default::default());
+ convert_tokens(&mut c)
+}
-#[derive(Debug, Clone)]
-pub struct SyntheticToken {
- pub kind: SyntaxKind,
- pub text: SmolStr,
- pub range: TextRange,
- pub id: SyntheticTokenId,
+/// Converts a syntax tree to a [`tt::Subtree`] using the provided span map to populate the
+/// subtree's spans. Additionally using the append and remove parameters, the additional tokens can
+/// be injected or hidden from the output.
+pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>(
+ node: &SyntaxNode,
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Anchor, Ctx>>>>,
+ remove: FxHashSet<SyntaxNode>,
+) -> tt::Subtree<SpanData<Anchor, Ctx>>
+where
+ SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
+ let mut c = Converter::new(node, map, append, remove);
+ convert_tokens(&mut c)
}
// The following items are what `rustc` macro can be parsed into :
@@ -107,10 +111,17 @@ pub struct SyntheticToken {
// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
-pub fn token_tree_to_syntax_node(
- tt: &tt::Subtree,
+/// Converts a [`tt::Subtree`] back to a [`SyntaxNode`].
+/// The produced `SpanMap` contains a mapping from the syntax nodes offsets to the subtree's spans.
+pub fn token_tree_to_syntax_node<Anchor, Ctx>(
+ tt: &tt::Subtree<SpanData<Anchor, Ctx>>,
entry_point: parser::TopEntryPoint,
-) -> (Parse<SyntaxNode>, TokenMap) {
+) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>)
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
let buffer = match tt {
tt::Subtree {
delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. },
@@ -137,29 +148,41 @@ pub fn token_tree_to_syntax_node(
tree_sink.finish()
}
-/// Convert a string to a `TokenTree`
-pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
+/// anchor with the given context.
+pub fn parse_to_token_tree<Anchor, Ctx>(
+ anchor: Anchor,
+ ctx: Ctx,
+ text: &str,
+) -> Option<tt::Subtree<SpanData<Anchor, Ctx>>>
+where
+ SpanData<Anchor, Ctx>: Span,
+ Anchor: Copy,
+ Ctx: SyntaxContext,
+{
let lexed = parser::LexedStr::new(text);
if lexed.errors().next().is_some() {
return None;
}
+ let mut conv = RawConverter { lexed, pos: 0, anchor, ctx };
+ Some(convert_tokens(&mut conv))
+}
- let mut conv = RawConverter {
- lexed,
- pos: 0,
- id_alloc: TokenIdAlloc {
- map: Default::default(),
- global_offset: TextSize::default(),
- next_id: 0,
- },
- };
-
- let subtree = convert_tokens(&mut conv);
- Some((subtree, conv.id_alloc.map))
+/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
+pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
+where
+ S: Span,
+{
+ let lexed = parser::LexedStr::new(text);
+ if lexed.errors().next().is_some() {
+ return None;
+ }
+ let mut conv = StaticRawConverter { lexed, pos: 0, span };
+ Some(convert_tokens(&mut conv))
}
/// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
if tt.token_trees.is_empty() {
return Vec::new();
}
@@ -172,10 +195,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
res.push(match expanded.value {
None => break,
- Some(tt @ tt::TokenTree::Leaf(_)) => {
- tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt] }
- }
- Some(tt::TokenTree::Subtree(tt)) => tt,
+ Some(tt) => tt.subtree_or_wrap(),
});
let mut fork = iter.clone();
@@ -187,7 +207,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
if iter.peek_n(0).is_some() {
res.push(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: iter.cloned().collect(),
});
}
@@ -195,136 +215,118 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
res
}
-fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
- struct StackEntry {
- subtree: tt::Subtree,
- idx: usize,
- open_range: TextRange,
- }
-
- let entry = StackEntry {
- subtree: tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
- // never used (delimiter is `None`)
- idx: !0,
- open_range: TextRange::empty(TextSize::of('.')),
- };
+fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
+where
+ C: TokenConverter<S>,
+ S: Span,
+{
+ let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] };
let mut stack = NonEmptyVec::new(entry);
- loop {
- let StackEntry { subtree, .. } = stack.last_mut();
- let result = &mut subtree.token_trees;
- let (token, range) = match conv.bump() {
- Some(it) => it,
- None => break,
- };
- let synth_id = token.synthetic_id(conv);
-
- let kind = token.kind(conv);
- if kind == COMMENT {
- // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
- // figure out which token id to use for the doc comment, if it is converted successfully.
- let next_id = conv.id_alloc().peek_next_id();
- if let Some(tokens) = conv.convert_doc_comment(&token, next_id) {
- let id = conv.id_alloc().alloc(range, synth_id);
- debug_assert_eq!(id, next_id);
- result.extend(tokens);
- }
- continue;
- }
- let tt = if kind.is_punct() && kind != UNDERSCORE {
- if synth_id.is_none() {
- assert_eq!(range.len(), TextSize::of('.'));
- }
-
- let expected = match subtree.delimiter.kind {
- tt::DelimiterKind::Parenthesis => Some(T![')']),
- tt::DelimiterKind::Brace => Some(T!['}']),
- tt::DelimiterKind::Bracket => Some(T![']']),
- tt::DelimiterKind::Invisible => None,
- };
-
- if let Some(expected) = expected {
- if kind == expected {
- if let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, Some(range));
- stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+ while let Some((token, abs_range)) = conv.bump() {
+ let tt::Subtree { delimiter, token_trees: result } = stack.last_mut();
+
+ let tt = match token.as_leaf() {
+ Some(leaf) => tt::TokenTree::Leaf(leaf.clone()),
+ None => match token.kind(conv) {
+ // Desugar doc comments into doc attributes
+ COMMENT => {
+ let span = conv.span_for(abs_range);
+ if let Some(tokens) = conv.convert_doc_comment(&token, span) {
+ result.extend(tokens);
}
continue;
}
- }
-
- let delim = match kind {
- T!['('] => Some(tt::DelimiterKind::Parenthesis),
- T!['{'] => Some(tt::DelimiterKind::Brace),
- T!['['] => Some(tt::DelimiterKind::Bracket),
- _ => None,
- };
+ kind if kind.is_punct() && kind != UNDERSCORE => {
+ let expected = match delimiter.kind {
+ tt::DelimiterKind::Parenthesis => Some(T![')']),
+ tt::DelimiterKind::Brace => Some(T!['}']),
+ tt::DelimiterKind::Bracket => Some(T![']']),
+ tt::DelimiterKind::Invisible => None,
+ };
+
+ // Current token is a closing delimiter that we expect, fix up the closing span
+ // and end the subtree here
+ if matches!(expected, Some(expected) if expected == kind) {
+ if let Some(mut subtree) = stack.pop() {
+ subtree.delimiter.close = conv.span_for(abs_range);
+ stack.last_mut().token_trees.push(subtree.into());
+ }
+ continue;
+ }
- if let Some(kind) = delim {
- let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
- let subtree = tt::Subtree {
- delimiter: tt::Delimiter { open: id, close: tt::TokenId::UNSPECIFIED, kind },
- token_trees: vec![],
- };
- stack.push(StackEntry { subtree, idx, open_range: range });
- continue;
- }
+ let delim = match kind {
+ T!['('] => Some(tt::DelimiterKind::Parenthesis),
+ T!['{'] => Some(tt::DelimiterKind::Brace),
+ T!['['] => Some(tt::DelimiterKind::Bracket),
+ _ => None,
+ };
+
+ // Start a new subtree
+ if let Some(kind) = delim {
+ let open = conv.span_for(abs_range);
+ stack.push(tt::Subtree {
+ delimiter: tt::Delimiter {
+ open,
+ // will be overwritten on subtree close above
+ close: open,
+ kind,
+ },
+ token_trees: vec![],
+ });
+ continue;
+ }
- let spacing = match conv.peek().map(|next| next.kind(conv)) {
- Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
- _ => tt::Spacing::Alone,
- };
- let char = match token.to_char(conv) {
- Some(c) => c,
- None => {
- panic!("Token from lexer must be single char: token = {token:#?}");
+ let spacing = match conv.peek().map(|next| next.kind(conv)) {
+ Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
+ _ => tt::Spacing::Alone,
+ };
+ let Some(char) = token.to_char(conv) else {
+ panic!("Token from lexer must be single char: token = {token:#?}")
+ };
+ tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) })
+ .into()
}
- };
- tt::Leaf::from(tt::Punct {
- char,
- spacing,
- span: conv.id_alloc().alloc(range, synth_id),
- })
- .into()
- } else {
- macro_rules! make_leaf {
- ($i:ident) => {
- tt::$i {
- span: conv.id_alloc().alloc(range, synth_id),
- text: token.to_text(conv),
+ kind => {
+ macro_rules! make_leaf {
+ ($i:ident) => {
+ tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }
+ .into()
+ };
}
- .into()
- };
- }
- let leaf: tt::Leaf = match kind {
- T![true] | T![false] => make_leaf!(Ident),
- IDENT => make_leaf!(Ident),
- UNDERSCORE => make_leaf!(Ident),
- k if k.is_keyword() => make_leaf!(Ident),
- k if k.is_literal() => make_leaf!(Literal),
- LIFETIME_IDENT => {
- let char_unit = TextSize::of('\'');
- let r = TextRange::at(range.start(), char_unit);
- let apostrophe = tt::Leaf::from(tt::Punct {
- char: '\'',
- spacing: tt::Spacing::Joint,
- span: conv.id_alloc().alloc(r, synth_id),
- });
- result.push(apostrophe.into());
-
- let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
- let ident = tt::Leaf::from(tt::Ident {
- text: SmolStr::new(&token.to_text(conv)[1..]),
- span: conv.id_alloc().alloc(r, synth_id),
- });
- result.push(ident.into());
- continue;
- }
- _ => continue,
- };
+ let leaf: tt::Leaf<_> = match kind {
+ T![true] | T![false] => make_leaf!(Ident),
+ IDENT => make_leaf!(Ident),
+ UNDERSCORE => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ LIFETIME_IDENT => {
+ let apostrophe = tt::Leaf::from(tt::Punct {
+ char: '\'',
+ spacing: tt::Spacing::Joint,
+ span: conv
+ .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
+ });
+ result.push(apostrophe.into());
+
+ let ident = tt::Leaf::from(tt::Ident {
+ text: SmolStr::new(&token.to_text(conv)[1..]),
+ span: conv.span_for(TextRange::new(
+ abs_range.start() + TextSize::of('\''),
+ abs_range.end(),
+ )),
+ });
+ result.push(ident.into());
+ continue;
+ }
+ _ => continue,
+ };
- leaf.into()
+ leaf.into()
+ }
+ },
};
+
result.push(tt);
}
@@ -334,10 +336,9 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
while let Some(entry) = stack.pop() {
let parent = stack.last_mut();
- conv.id_alloc().close_delim(entry.idx, None);
- let leaf: tt::Leaf = tt::Punct {
- span: conv.id_alloc().alloc(entry.open_range, None),
- char: match entry.subtree.delimiter.kind {
+ let leaf: tt::Leaf<_> = tt::Punct {
+ span: entry.delimiter.open,
+ char: match entry.delimiter.kind {
tt::DelimiterKind::Parenthesis => '(',
tt::DelimiterKind::Brace => '{',
tt::DelimiterKind::Bracket => '[',
@@ -346,11 +347,11 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
spacing: tt::Spacing::Alone,
}
.into();
- parent.subtree.token_trees.push(leaf.into());
- parent.subtree.token_trees.extend(entry.subtree.token_trees);
+ parent.token_trees.push(leaf.into());
+ parent.token_trees.extend(entry.token_trees);
}
- let subtree = stack.into_last().subtree;
+ let subtree = stack.into_last();
if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
first.clone()
} else {
@@ -358,111 +359,6 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
}
}
-fn collect_tokens<C: TokenConverter>(conv: &mut C) {
- struct StackEntry {
- idx: usize,
- open_range: TextRange,
- delimiter: tt::DelimiterKind,
- }
-
- let entry = StackEntry {
- delimiter: tt::DelimiterKind::Invisible,
- // never used (delimiter is `None`)
- idx: !0,
- open_range: TextRange::empty(TextSize::of('.')),
- };
- let mut stack = NonEmptyVec::new(entry);
-
- loop {
- let StackEntry { delimiter, .. } = stack.last_mut();
- let (token, range) = match conv.bump() {
- Some(it) => it,
- None => break,
- };
- let synth_id = token.synthetic_id(conv);
-
- let kind = token.kind(conv);
- if kind == COMMENT {
- // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
- // figure out which token id to use for the doc comment, if it is converted successfully.
- let next_id = conv.id_alloc().peek_next_id();
- if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) {
- let id = conv.id_alloc().alloc(range, synth_id);
- debug_assert_eq!(id, next_id);
- }
- continue;
- }
- if kind.is_punct() && kind != UNDERSCORE {
- if synth_id.is_none() {
- assert_eq!(range.len(), TextSize::of('.'));
- }
-
- let expected = match delimiter {
- tt::DelimiterKind::Parenthesis => Some(T![')']),
- tt::DelimiterKind::Brace => Some(T!['}']),
- tt::DelimiterKind::Bracket => Some(T![']']),
- tt::DelimiterKind::Invisible => None,
- };
-
- if let Some(expected) = expected {
- if kind == expected {
- if let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, Some(range));
- }
- continue;
- }
- }
-
- let delim = match kind {
- T!['('] => Some(tt::DelimiterKind::Parenthesis),
- T!['{'] => Some(tt::DelimiterKind::Brace),
- T!['['] => Some(tt::DelimiterKind::Bracket),
- _ => None,
- };
-
- if let Some(kind) = delim {
- let (_id, idx) = conv.id_alloc().open_delim(range, synth_id);
-
- stack.push(StackEntry { idx, open_range: range, delimiter: kind });
- continue;
- }
-
- conv.id_alloc().alloc(range, synth_id);
- } else {
- macro_rules! make_leaf {
- ($i:ident) => {{
- conv.id_alloc().alloc(range, synth_id);
- }};
- }
- match kind {
- T![true] | T![false] => make_leaf!(Ident),
- IDENT => make_leaf!(Ident),
- UNDERSCORE => make_leaf!(Ident),
- k if k.is_keyword() => make_leaf!(Ident),
- k if k.is_literal() => make_leaf!(Literal),
- LIFETIME_IDENT => {
- let char_unit = TextSize::of('\'');
- let r = TextRange::at(range.start(), char_unit);
- conv.id_alloc().alloc(r, synth_id);
-
- let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
- conv.id_alloc().alloc(r, synth_id);
- continue;
- }
- _ => continue,
- };
- };
-
- // If we get here, we've consumed all input tokens.
- // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
- // Merge them so we're left with one.
- while let Some(entry) = stack.pop() {
- conv.id_alloc().close_delim(entry.idx, None);
- conv.id_alloc().alloc(entry.open_range, None);
- }
- }
-}
-
fn is_single_token_op(kind: SyntaxKind) -> bool {
matches!(
kind,
@@ -511,162 +407,126 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
text.into()
}
-fn convert_doc_comment(
+fn convert_doc_comment<S: Copy>(
token: &syntax::SyntaxToken,
- span: tt::TokenId,
-) -> Option<Vec<tt::TokenTree>> {
+ span: S,
+) -> Option<Vec<tt::TokenTree<S>>> {
cov_mark::hit!(test_meta_doc_comments);
let comment = ast::Comment::cast(token.clone())?;
let doc = comment.kind().doc?;
- // Make `doc="\" Comments\""
- let meta_tkns =
- vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)];
+ let mk_ident =
+ |s: &str| tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }));
- // Make `#![]`
- let mut token_trees = Vec::with_capacity(3);
- token_trees.push(mk_punct('#', span));
- if let ast::CommentPlacement::Inner = doc {
- token_trees.push(mk_punct('!', span));
- }
- token_trees.push(tt::TokenTree::from(tt::Subtree {
- delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
- token_trees: meta_tkns,
- }));
-
- return Some(token_trees);
-
- // Helper functions
- fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree {
- tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }))
- }
-
- fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree {
+ let mk_punct = |c: char| {
tt::TokenTree::from(tt::Leaf::from(tt::Punct {
char: c,
spacing: tt::Spacing::Alone,
span,
}))
- }
+ };
- fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree {
+ let mk_doc_literal = |comment: &ast::Comment| {
let lit = tt::Literal { text: doc_comment_text(comment), span };
tt::TokenTree::from(tt::Leaf::from(lit))
- }
-}
-
-struct TokenIdAlloc {
- map: TokenMap,
- global_offset: TextSize,
- next_id: u32,
-}
-
-impl TokenIdAlloc {
- fn alloc(
- &mut self,
- absolute_range: TextRange,
- synthetic_id: Option<SyntheticTokenId>,
- ) -> tt::TokenId {
- let relative_range = absolute_range - self.global_offset;
- let token_id = tt::TokenId(self.next_id);
- self.next_id += 1;
- self.map.insert(token_id, relative_range);
- if let Some(id) = synthetic_id {
- self.map.insert_synthetic(token_id, id);
- }
- token_id
- }
+ };
- fn open_delim(
- &mut self,
- open_abs_range: TextRange,
- synthetic_id: Option<SyntheticTokenId>,
- ) -> (tt::TokenId, usize) {
- let token_id = tt::TokenId(self.next_id);
- self.next_id += 1;
- let idx = self.map.insert_delim(
- token_id,
- open_abs_range - self.global_offset,
- open_abs_range - self.global_offset,
- );
- if let Some(id) = synthetic_id {
- self.map.insert_synthetic(token_id, id);
- }
- (token_id, idx)
- }
+ // Make `doc="\" Comments\""
+ let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
- fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
- match close_abs_range {
- None => {
- self.map.remove_delim(idx);
- }
- Some(close) => {
- self.map.update_close_delim(idx, close - self.global_offset);
- }
- }
+ // Make `#![]`
+ let mut token_trees = Vec::with_capacity(3);
+ token_trees.push(mk_punct('#'));
+ if let ast::CommentPlacement::Inner = doc {
+ token_trees.push(mk_punct('!'));
}
+ token_trees.push(tt::TokenTree::from(tt::Subtree {
+ delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+ token_trees: meta_tkns,
+ }));
- fn peek_next_id(&self) -> tt::TokenId {
- tt::TokenId(self.next_id)
- }
+ Some(token_trees)
}
/// A raw token (straight from lexer) converter
-struct RawConverter<'a> {
+struct RawConverter<'a, Anchor, Ctx> {
+ lexed: parser::LexedStr<'a>,
+ pos: usize,
+ anchor: Anchor,
+ ctx: Ctx,
+}
+/// A raw token (straight from lexer) converter that gives every token the same span.
+struct StaticRawConverter<'a, S> {
lexed: parser::LexedStr<'a>,
pos: usize,
- id_alloc: TokenIdAlloc,
+ span: S,
}
-trait SrcToken<Ctx>: std::fmt::Debug {
+trait SrcToken<Ctx, S>: std::fmt::Debug {
fn kind(&self, ctx: &Ctx) -> SyntaxKind;
fn to_char(&self, ctx: &Ctx) -> Option<char>;
fn to_text(&self, ctx: &Ctx) -> SmolStr;
- fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
+ fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+ None
+ }
}
-trait TokenConverter: Sized {
- type Token: SrcToken<Self>;
+trait TokenConverter<S>: Sized {
+ type Token: SrcToken<Self, S>;
- fn convert_doc_comment(
- &self,
- token: &Self::Token,
- span: tt::TokenId,
- ) -> Option<Vec<tt::TokenTree>>;
+ fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>;
fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
fn peek(&self) -> Option<Self::Token>;
- fn id_alloc(&mut self) -> &mut TokenIdAlloc;
+ fn span_for(&self, range: TextRange) -> S;
}
-impl SrcToken<RawConverter<'_>> for usize {
- fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
+impl<Anchor, S, Ctx> SrcToken<RawConverter<'_, Anchor, Ctx>, S> for usize {
+ fn kind(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
- fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
+ fn to_char(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
- fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
+ fn to_text(&self, ctx: &RawConverter<'_, Anchor, Ctx>) -> SmolStr {
ctx.lexed.text(*self).into()
}
+}
- fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option<SyntheticTokenId> {
- None
+impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
+ fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
+ ctx.lexed.kind(*self)
+ }
+
+ fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option<char> {
+ ctx.lexed.text(*self).chars().next()
+ }
+
+ fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr {
+ ctx.lexed.text(*self).into()
}
}
-impl TokenConverter for RawConverter<'_> {
+impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<SpanData<Anchor, Ctx>>
+ for RawConverter<'_, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
type Token = usize;
- fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> {
+ fn convert_doc_comment(
+ &self,
+ &token: &usize,
+ span: SpanData<Anchor, Ctx>,
+ ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> {
let text = self.lexed.text(token);
convert_doc_comment(&doc_comment(text), span)
}
@@ -678,7 +538,7 @@ impl TokenConverter for RawConverter<'_> {
let token = self.pos;
self.pos += 1;
let range = self.lexed.text_range(token);
- let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?);
Some((token, range))
}
@@ -690,137 +550,165 @@ impl TokenConverter for RawConverter<'_> {
Some(self.pos)
}
- fn id_alloc(&mut self) -> &mut TokenIdAlloc {
- &mut self.id_alloc
+ fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> {
+ SpanData { range, anchor: self.anchor, ctx: self.ctx }
}
}
-struct Converter {
- id_alloc: TokenIdAlloc,
+impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
+where
+ S: Span,
+{
+ type Token = usize;
+
+ fn convert_doc_comment(&self, &token: &usize, span: S) -> Option<Vec<tt::TokenTree<S>>> {
+ let text = self.lexed.text(token);
+ convert_doc_comment(&doc_comment(text), span)
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ let token = self.pos;
+ self.pos += 1;
+ let range = self.lexed.text_range(token);
+ let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?);
+
+ Some((token, range))
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ Some(self.pos)
+ }
+
+ fn span_for(&self, _: TextRange) -> S {
+ self.span
+ }
+}
+
+struct Converter<SpanMap, S> {
current: Option<SyntaxToken>,
- current_synthetic: Vec<SyntheticToken>,
+ current_leafs: Vec<tt::Leaf<S>>,
preorder: PreorderWithTokens,
- replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
+ /// Used to make the emitted text ranges in the spans relative to the span anchor.
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+ remove: FxHashSet<SyntaxNode>,
}
-impl Converter {
+impl<SpanMap, S> Converter<SpanMap, S> {
fn new(
node: &SyntaxNode,
- global_offset: TextSize,
- existing_token_map: TokenMap,
- next_id: u32,
- mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- ) -> Converter {
- let range = node.text_range();
- let mut preorder = node.preorder_with_tokens();
- let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
- Converter {
- id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
- current: first,
- current_synthetic: synthetic,
- preorder,
- range,
- replace,
- append,
+ map: SpanMap,
+ append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+ remove: FxHashSet<SyntaxNode>,
+ ) -> Self {
+ let mut this = Converter {
+ current: None,
+ preorder: node.preorder_with_tokens(),
+ range: node.text_range(),
punct_offset: None,
- }
- }
-
- fn next_token(
- preorder: &mut PreorderWithTokens,
- replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
- while let Some(ev) = preorder.next() {
- let ele = match ev {
- WalkEvent::Enter(ele) => ele,
- WalkEvent::Leave(ele) => {
- if let Some(mut v) = append.remove(&ele) {
- if !v.is_empty() {
- v.reverse();
- return (None, v);
- }
+ map,
+ append,
+ remove,
+ current_leafs: vec![],
+ };
+ let first = this.next_token();
+ this.current = first;
+ this
+ }
+
+ fn next_token(&mut self) -> Option<SyntaxToken> {
+ while let Some(ev) = self.preorder.next() {
+ match ev {
+ WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
+ WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
+ self.preorder.skip_subtree();
+ if let Some(mut v) = self.append.remove(&n.into()) {
+ v.reverse();
+ self.current_leafs.extend(v);
+ return None;
}
- continue;
}
- };
- if let Some(mut v) = replace.remove(&ele) {
- preorder.skip_subtree();
- if !v.is_empty() {
- v.reverse();
- return (None, v);
+ WalkEvent::Enter(SyntaxElement::Node(_)) => (),
+ WalkEvent::Leave(ele) => {
+ if let Some(mut v) = self.append.remove(&ele) {
+ v.reverse();
+ self.current_leafs.extend(v);
+ return None;
+ }
}
}
- match ele {
- SyntaxElement::Token(t) => return (Some(t), Vec::new()),
- _ => {}
- }
}
- (None, Vec::new())
+ None
}
}
#[derive(Debug)]
-enum SynToken {
+enum SynToken<S> {
Ordinary(SyntaxToken),
- // FIXME is this supposed to be `Punct`?
- Punch(SyntaxToken, TextSize),
- Synthetic(SyntheticToken),
+ Punct { token: SyntaxToken, offset: usize },
+ Leaf(tt::Leaf<S>),
}
-impl SynToken {
- fn token(&self) -> Option<&SyntaxToken> {
+impl<S> SynToken<S> {
+ fn token(&self) -> &SyntaxToken {
match self {
- SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
- SynToken::Synthetic(_) => None,
+ SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it,
+ SynToken::Leaf(_) => unreachable!(),
}
}
}
-impl SrcToken<Converter> for SynToken {
- fn kind(&self, ctx: &Converter) -> SyntaxKind {
+impl<SpanMap, S: std::fmt::Debug> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
+ fn kind(&self, ctx: &Converter<SpanMap, S>) -> SyntaxKind {
match self {
SynToken::Ordinary(token) => token.kind(),
- SynToken::Punch(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
- SynToken::Synthetic(token) => token.kind,
+ SynToken::Punct { .. } => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
+ SynToken::Leaf(_) => {
+ never!();
+ SyntaxKind::ERROR
+ }
}
}
- fn to_char(&self, _ctx: &Converter) -> Option<char> {
+ fn to_char(&self, _ctx: &Converter<SpanMap, S>) -> Option<char> {
match self {
SynToken::Ordinary(_) => None,
- SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
- SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
- SynToken::Synthetic(_) => None,
+ SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
+ SynToken::Leaf(_) => None,
}
}
- fn to_text(&self, _ctx: &Converter) -> SmolStr {
+ fn to_text(&self, _ctx: &Converter<SpanMap, S>) -> SmolStr {
match self {
- SynToken::Ordinary(token) => token.text().into(),
- SynToken::Punch(token, _) => token.text().into(),
- SynToken::Synthetic(token) => token.text.clone(),
+ SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
+ SynToken::Leaf(_) => {
+ never!();
+ "".into()
+ }
}
}
-
- fn synthetic_id(&self, _ctx: &Converter) -> Option<SyntheticTokenId> {
+ fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
match self {
- SynToken::Synthetic(token) => Some(token.id),
- _ => None,
+ SynToken::Ordinary(_) | SynToken::Punct { .. } => None,
+ SynToken::Leaf(it) => Some(it),
}
}
}
-impl TokenConverter for Converter {
- type Token = SynToken;
- fn convert_doc_comment(
- &self,
- token: &Self::Token,
- span: tt::TokenId,
- ) -> Option<Vec<tt::TokenTree>> {
- convert_doc_comment(token.token()?, span)
+impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
+where
+ S: Span,
+ SpanMap: SpanMapper<S>,
+{
+ type Token = SynToken<S>;
+ fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
+ convert_doc_comment(token.token(), span)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -830,34 +718,31 @@ impl TokenConverter for Converter {
let range = punct.text_range();
self.punct_offset = Some((punct.clone(), offset));
let range = TextRange::at(range.start() + offset, TextSize::of('.'));
- return Some((SynToken::Punch(punct, offset), range));
+ return Some((
+ SynToken::Punct { token: punct, offset: u32::from(offset) as usize },
+ range,
+ ));
}
}
- if let Some(synth_token) = self.current_synthetic.pop() {
- if self.current_synthetic.is_empty() {
- let (new_current, new_synth) =
- Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
- self.current = new_current;
- self.current_synthetic = new_synth;
+ if let Some(leaf) = self.current_leafs.pop() {
+ if self.current_leafs.is_empty() {
+ self.current = self.next_token();
}
- let range = synth_token.range;
- return Some((SynToken::Synthetic(synth_token), range));
+ return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
}
let curr = self.current.clone()?;
if !self.range.contains_range(curr.text_range()) {
return None;
}
- let (new_current, new_synth) =
- Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
- self.current = new_current;
- self.current_synthetic = new_synth;
+
+ self.current = self.next_token();
let token = if curr.kind().is_punct() {
self.punct_offset = Some((curr.clone(), 0.into()));
let range = curr.text_range();
let range = TextRange::at(range.start(), TextSize::of('.'));
- (SynToken::Punch(curr, 0.into()), range)
+ (SynToken::Punct { token: curr, offset: 0 as usize }, range)
} else {
self.punct_offset = None;
let range = curr.text_range();
@@ -871,55 +756,55 @@ impl TokenConverter for Converter {
if let Some((punct, mut offset)) = self.punct_offset.clone() {
offset += TextSize::of('.');
if usize::from(offset) < punct.text().len() {
- return Some(SynToken::Punch(punct, offset));
+ return Some(SynToken::Punct { token: punct, offset: usize::from(offset) });
}
}
- if let Some(synth_token) = self.current_synthetic.last() {
- return Some(SynToken::Synthetic(synth_token.clone()));
- }
-
let curr = self.current.clone()?;
if !self.range.contains_range(curr.text_range()) {
return None;
}
let token = if curr.kind().is_punct() {
- SynToken::Punch(curr, 0.into())
+ SynToken::Punct { token: curr, offset: 0 as usize }
} else {
SynToken::Ordinary(curr)
};
Some(token)
}
- fn id_alloc(&mut self) -> &mut TokenIdAlloc {
- &mut self.id_alloc
+ fn span_for(&self, range: TextRange) -> S {
+ self.map.span_for(range)
}
}
-struct TtTreeSink<'a> {
+struct TtTreeSink<'a, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
buf: String,
- cursor: Cursor<'a>,
- open_delims: FxHashMap<tt::TokenId, TextSize>,
+ cursor: Cursor<'a, SpanData<Anchor, Ctx>>,
text_pos: TextSize,
inner: SyntaxTreeBuilder,
- token_map: TokenMap,
+ token_map: SpanMap<SpanData<Anchor, Ctx>>,
}
-impl<'a> TtTreeSink<'a> {
- fn new(cursor: Cursor<'a>) -> Self {
+impl<'a, Anchor, Ctx> TtTreeSink<'a, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
+ fn new(cursor: Cursor<'a, SpanData<Anchor, Ctx>>) -> Self {
TtTreeSink {
buf: String::new(),
cursor,
- open_delims: FxHashMap::default(),
text_pos: 0.into(),
inner: SyntaxTreeBuilder::default(),
- token_map: TokenMap::default(),
+ token_map: SpanMap::empty(),
}
}
- fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
- self.token_map.shrink_to_fit();
+ fn finish(mut self) -> (Parse<SyntaxNode>, SpanMap<SpanData<Anchor, Ctx>>) {
+ self.token_map.finish();
(self.inner.finish(), self.token_map)
}
}
@@ -936,27 +821,34 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
-impl TtTreeSink<'_> {
+impl<Anchor, Ctx> TtTreeSink<'_, Anchor, Ctx>
+where
+ SpanData<Anchor, Ctx>: Span,
+{
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.
fn float_split(&mut self, has_pseudo_dot: bool) {
- let (text, _span) = match self.cursor.token_tree() {
+ let (text, span) = match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => {
(lit.text.as_str(), lit.span)
}
_ => unreachable!(),
};
+ // FIXME: Span splitting
match text.split_once('.') {
Some((left, right)) => {
assert!(!left.is_empty());
+
self.inner.start_node(SyntaxKind::NAME_REF);
self.inner.token(SyntaxKind::INT_NUMBER, left);
self.inner.finish_node();
+ self.token_map.push(self.text_pos + TextSize::of(left), span);
// here we move the exit up, the original exit has been deleted in process
self.inner.finish_node();
self.inner.token(SyntaxKind::DOT, ".");
+ self.token_map.push(self.text_pos + TextSize::of(left) + TextSize::of("."), span);
if has_pseudo_dot {
assert!(right.is_empty(), "{left}.{right}");
@@ -964,11 +856,13 @@ impl TtTreeSink<'_> {
assert!(!right.is_empty(), "{left}.{right}");
self.inner.start_node(SyntaxKind::NAME_REF);
self.inner.token(SyntaxKind::INT_NUMBER, right);
+ self.token_map.push(self.text_pos + TextSize::of(text), span);
self.inner.finish_node();
// the parser creates an unbalanced start node, we are required to close it here
self.inner.finish_node();
}
+ self.text_pos += TextSize::of(text);
}
None => unreachable!(),
}
@@ -987,11 +881,11 @@ impl TtTreeSink<'_> {
break;
}
last = self.cursor;
- let text: &str = loop {
+ let (text, span) = loop {
break match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
// Mark the range if needed
- let (text, id) = match leaf {
+ let (text, span) = match leaf {
tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span),
tt::Leaf::Punct(punct) => {
assert!(punct.char.is_ascii());
@@ -1003,18 +897,13 @@ impl TtTreeSink<'_> {
}
tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span),
};
- let range = TextRange::at(self.text_pos, TextSize::of(text));
- self.token_map.insert(id, range);
self.cursor = self.cursor.bump();
- text
+ (text, span)
}
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
self.cursor = self.cursor.subtree().unwrap();
match delim_to_str(subtree.delimiter.kind, false) {
- Some(it) => {
- self.open_delims.insert(subtree.delimiter.open, self.text_pos);
- it
- }
+ Some(it) => (it, subtree.delimiter.open),
None => continue,
}
}
@@ -1022,21 +911,7 @@ impl TtTreeSink<'_> {
let parent = self.cursor.end().unwrap();
self.cursor = self.cursor.bump();
match delim_to_str(parent.delimiter.kind, true) {
- Some(it) => {
- if let Some(open_delim) =
- self.open_delims.get(&parent.delimiter.open)
- {
- let open_range = TextRange::at(*open_delim, TextSize::of('('));
- let close_range =
- TextRange::at(self.text_pos, TextSize::of('('));
- self.token_map.insert_delim(
- parent.delimiter.open,
- open_range,
- close_range,
- );
- }
- it
- }
+ Some(it) => (it, parent.delimiter.close),
None => continue,
}
}
@@ -1044,10 +919,12 @@ impl TtTreeSink<'_> {
};
self.buf += text;
self.text_pos += TextSize::of(text);
+ self.token_map.push(self.text_pos, span);
}
self.inner.token(kind, self.buf.as_str());
self.buf.clear();
+ // FIXME: Emitting whitespace for this is really just a hack, we should get rid of it.
// Add whitespace between adjoint puncts
let next = last.bump();
if let (
@@ -1063,6 +940,7 @@ impl TtTreeSink<'_> {
if curr.spacing == tt::Spacing::Alone && curr.char != ';' && next.char != '\'' {
self.inner.token(WHITESPACE, " ");
self.text_pos += TextSize::of(' ');
+ self.token_map.push(self.text_pos, curr.span);
}
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
index fa0125f3e..bd8187a14 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
@@ -7,21 +7,20 @@ use tt::{
Leaf, Punct, Spacing,
};
-use super::syntax_node_to_token_tree;
+use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
fn check_punct_spacing(fixture: &str) {
let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
- let (subtree, token_map) = syntax_node_to_token_tree(source_file.syntax());
+ let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
let mut annotations: HashMap<_, _> = extract_annotations(fixture)
.into_iter()
.map(|(range, annotation)| {
- let token = token_map.token_by_range(range).expect("no token found");
let spacing = match annotation.as_str() {
"Alone" => Spacing::Alone,
"Joint" => Spacing::Joint,
a => panic!("unknown annotation: {a}"),
};
- (token, spacing)
+ (range, spacing)
})
.collect();
@@ -29,8 +28,12 @@ fn check_punct_spacing(fixture: &str) {
let mut cursor = buf.begin();
while !cursor.eof() {
while let Some(token_tree) = cursor.token_tree() {
- if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree {
- if let Some(expected) = annotations.remove(span) {
+ if let TokenTreeRef::Leaf(
+ Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }),
+ _,
+ ) = token_tree
+ {
+ if let Some(expected) = annotations.remove(range) {
assert_eq!(expected, *spacing);
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
index 051e20b3a..00a14f046 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
@@ -3,9 +3,9 @@
use syntax::{SyntaxKind, SyntaxKind::*, T};
-use crate::tt::buffer::TokenBuffer;
+use tt::{buffer::TokenBuffer, Span};
-pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
+pub(crate) fn to_parser_input<S: Span>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
let mut res = parser::Input::default();
let mut current = buffer.begin();
diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
index 73a27df5d..7d15812f8 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
@@ -2,123 +2,75 @@
use std::hash::Hash;
-use parser::{SyntaxKind, T};
+use stdx::{always, itertools::Itertools};
use syntax::{TextRange, TextSize};
+use tt::Span;
-use crate::syntax_bridge::SyntheticTokenId;
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
-enum TokenTextRange {
- Token(TextRange),
- Delimiter(TextRange),
+/// Maps absolute text ranges for the corresponding file to the relevant span data.
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub struct SpanMap<S: Span> {
+ spans: Vec<(TextSize, S)>,
}
-impl TokenTextRange {
- fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
- match self {
- TokenTextRange::Token(it) => Some(it),
- TokenTextRange::Delimiter(it) => match kind {
- T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
- T!['}'] | T![')'] | T![']'] => {
- Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
- }
- _ => None,
- },
- }
+impl<S: Span> SpanMap<S> {
+ /// Creates a new empty [`SpanMap`].
+ pub fn empty() -> Self {
+ Self { spans: Vec::new() }
}
-}
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
-pub struct TokenMap {
- /// Maps `tt::TokenId` to the *relative* source range.
- entries: Vec<(tt::TokenId, TokenTextRange)>,
- pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
-}
-
-impl TokenMap {
- pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
- let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
- TokenTextRange::Token(it) => *it == relative_range,
- TokenTextRange::Delimiter(it) => {
- let open = TextRange::at(it.start(), 1.into());
- let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
- open == relative_range || close == relative_range
- }
- })?;
- Some(token_id)
+ /// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are
+ /// in order.
+ pub fn finish(&mut self) {
+ always!(
+ self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0),
+ "spans are not in order"
+ );
+ self.spans.shrink_to_fit();
}
- pub fn ranges_by_token(
- &self,
- token_id: tt::TokenId,
- kind: SyntaxKind,
- ) -> impl Iterator<Item = TextRange> + '_ {
- self.entries
- .iter()
- .filter(move |&&(tid, _)| tid == token_id)
- .filter_map(move |(_, range)| range.by_kind(kind))
- }
-
- pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
- self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
- }
-
- pub fn first_range_by_token(
- &self,
- token_id: tt::TokenId,
- kind: SyntaxKind,
- ) -> Option<TextRange> {
- self.ranges_by_token(token_id, kind).next()
- }
-
- pub(crate) fn shrink_to_fit(&mut self) {
- self.entries.shrink_to_fit();
- self.synthetic_entries.shrink_to_fit();
- }
-
- pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
- self.entries.push((token_id, TokenTextRange::Token(relative_range)));
- }
-
- pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
- self.synthetic_entries.push((token_id, id));
- }
-
- pub(crate) fn insert_delim(
- &mut self,
- token_id: tt::TokenId,
- open_relative_range: TextRange,
- close_relative_range: TextRange,
- ) -> usize {
- let res = self.entries.len();
- let cover = open_relative_range.cover(close_relative_range);
-
- self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
- res
+ /// Pushes a new span onto the [`SpanMap`].
+ pub fn push(&mut self, offset: TextSize, span: S) {
+ if cfg!(debug_assertions) {
+ if let Some(&(last_offset, _)) = self.spans.last() {
+ assert!(
+ last_offset < offset,
+ "last_offset({last_offset:?}) must be smaller than offset({offset:?})"
+ );
+ }
+ }
+ self.spans.push((offset, span));
}
- pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
- let (_, token_text_range) = &mut self.entries[idx];
- if let TokenTextRange::Delimiter(dim) = token_text_range {
- let cover = dim.cover(close_relative_range);
- *token_text_range = TokenTextRange::Delimiter(cover);
- }
+ /// Returns all [`TextRange`]s that correspond to the given span.
+ ///
+ /// Note this does a linear search through the entire backing vector.
+ pub fn ranges_with_span(&self, span: S) -> impl Iterator<Item = TextRange> + '_ {
+ // FIXME: This should ignore the syntax context!
+ self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
+ if s != span {
+ return None;
+ }
+ let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
+ Some(TextRange::new(start, end))
+ })
}
- pub(crate) fn remove_delim(&mut self, idx: usize) {
- // FIXME: This could be accidentally quadratic
- self.entries.remove(idx);
+ /// Returns the span at the given position.
+ pub fn span_at(&self, offset: TextSize) -> S {
+ let entry = self.spans.partition_point(|&(it, _)| it <= offset);
+ self.spans[entry].1
}
- pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
- self.entries.iter().filter_map(|&(tid, tr)| match tr {
- TokenTextRange::Token(range) => Some((tid, range)),
- TokenTextRange::Delimiter(_) => None,
- })
+ /// Returns the spans associated with the given range.
+ /// In other words, this will return all spans that correspond to all offsets within the given range.
+ pub fn spans_for_range(&self, range: TextRange) -> impl Iterator<Item = S> + '_ {
+ let (start, end) = (range.start(), range.end());
+ let start_entry = self.spans.partition_point(|&(it, _)| it <= start);
+ let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong?
+ (&self.spans[start_entry..][..end_entry]).iter().map(|&(_, s)| s)
}
- pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
- self.entries.retain(|&(tid, _)| id(tid));
+ pub fn iter(&self) -> impl Iterator<Item = (TextSize, S)> + '_ {
+ self.spans.iter().copied()
}
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
index 79ff8ca28..40e8a2385 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -3,16 +3,17 @@
use smallvec::{smallvec, SmallVec};
use syntax::SyntaxKind;
+use tt::Span;
-use crate::{to_parser_input::to_parser_input, tt, ExpandError, ExpandResult};
+use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
#[derive(Debug, Clone)]
-pub(crate) struct TtIter<'a> {
- pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
+pub(crate) struct TtIter<'a, S> {
+ pub(crate) inner: std::slice::Iter<'a, tt::TokenTree<S>>,
}
-impl<'a> TtIter<'a> {
- pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> {
+impl<'a, S: Span> TtIter<'a, S> {
+ pub(crate) fn new(subtree: &'a tt::Subtree<S>) -> TtIter<'a, S> {
TtIter { inner: subtree.token_trees.iter() }
}
@@ -36,35 +37,35 @@ impl<'a> TtIter<'a> {
}
}
- pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> {
+ pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree<S>, ()> {
match self.next() {
Some(tt::TokenTree::Subtree(it)) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> {
+ pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
match self.next() {
Some(tt::TokenTree::Leaf(it)) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> {
+ pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Ident(it) if it.text != "_" => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident, ()> {
+ pub(crate) fn expect_ident_or_underscore(&mut self) -> Result<&'a tt::Ident<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Ident(it) => Ok(it),
_ => Err(()),
}
}
- pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
+ pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf<S>, ()> {
let it = self.expect_leaf()?;
match it {
tt::Leaf::Literal(_) => Ok(it),
@@ -73,7 +74,7 @@ impl<'a> TtIter<'a> {
}
}
- pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> {
+ pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct<S>, ()> {
match self.expect_leaf()? {
tt::Leaf::Punct(it) => Ok(it),
_ => Err(()),
@@ -84,7 +85,7 @@ impl<'a> TtIter<'a> {
///
/// This method currently may return a single quotation, which is part of lifetime ident and
/// conceptually not a punct in the context of mbe. Callers should handle this.
- pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct; 3]>, ()> {
+ pub(crate) fn expect_glued_punct(&mut self) -> Result<SmallVec<[tt::Punct<S>; 3]>, ()> {
let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else {
return Err(());
};
@@ -126,11 +127,10 @@ impl<'a> TtIter<'a> {
pub(crate) fn expect_fragment(
&mut self,
entry_point: parser::PrefixEntryPoint,
- ) -> ExpandResult<Option<tt::TokenTree>> {
+ ) -> ExpandResult<Option<tt::TokenTree<S>>> {
let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
let parser_input = to_parser_input(&buffer);
let tree_traversal = entry_point.parse(&parser_input);
-
let mut cursor = buffer.begin();
let mut error = false;
for step in tree_traversal.iter() {
@@ -162,32 +162,30 @@ impl<'a> TtIter<'a> {
let mut curr = buffer.begin();
let mut res = vec![];
- if cursor.is_root() {
- while curr != cursor {
- let Some(token) = curr.token_tree() else { break };
- res.push(token.cloned());
- curr = curr.bump();
- }
+ while curr != cursor {
+ let Some(token) = curr.token_tree() else { break };
+ res.push(token.cloned());
+ curr = curr.bump();
}
self.inner = self.inner.as_slice()[res.len()..].iter();
let res = match res.len() {
0 | 1 => res.pop(),
_ => Some(tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter::unspecified(),
+ delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: res,
})),
};
ExpandResult { value: res, err }
}
- pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> {
+ pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
self.inner.as_slice().get(n)
}
}
-impl<'a> Iterator for TtIter<'a> {
- type Item = &'a tt::TokenTree;
+impl<'a, S> Iterator for TtIter<'a, S> {
+ type Item = &'a tt::TokenTree<S>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next()
}
@@ -197,4 +195,4 @@ impl<'a> Iterator for TtIter<'a> {
}
}
-impl std::iter::ExactSizeIterator for TtIter<'_> {}
+impl<S> std::iter::ExactSizeIterator for TtIter<'_, S> {}
diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml
index 09e62c352..efb326323 100644
--- a/src/tools/rust-analyzer/crates/parser/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
drop_bomb = "0.1.5"
-rustc_lexer.workspace = true
+rustc-dependencies.workspace = true
limit.workspace = true
@@ -22,3 +22,6 @@ expect-test = "1.4.0"
stdx.workspace = true
sourcegen.workspace = true
+
+[features]
+in-rust-tree = ["rustc-dependencies/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/parser/src/event.rs b/src/tools/rust-analyzer/crates/parser/src/event.rs
index 577eb0967..e38571dd3 100644
--- a/src/tools/rust-analyzer/crates/parser/src/event.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/event.rs
@@ -2,11 +2,6 @@
//! It is intended to be completely decoupled from the
//! parser, so as to allow to evolve the tree representation
//! and the parser algorithm independently.
-//!
-//! The `TreeSink` trait is the bridge between the parser and the
-//! tree builder: the parser produces a stream of events like
-//! `start node`, `finish node`, and `FileBuilder` converts
-//! this stream to a real tree.
use std::mem;
use crate::{
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
index 6a2a9adce..19da297b5 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -376,6 +376,16 @@ fn error_block(p: &mut Parser<'_>, message: &str) {
m.complete(p, ERROR);
}
+// test_err top_level_let
+// let ref foo: fn() = 1 + 3;
+fn error_let_stmt(p: &mut Parser<'_>, message: &str) {
+ assert!(p.at(T![let]));
+ let m = p.start();
+ p.error(message);
+ expressions::let_stmt(p, expressions::Semicolon::Optional);
+ m.complete(p, ERROR);
+}
+
/// The `parser` passed this is required to at least consume one token if it returns `true`.
/// If the `parser` returns false, parsing will stop.
fn delimited(
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
index 1cbd16632..e346ece2f 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
@@ -59,7 +59,8 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
attributes::outer_attrs(p);
if p.at(T![let]) {
- let_stmt(p, m, semicolon);
+ let_stmt(p, semicolon);
+ m.complete(p, LET_STMT);
return;
}
@@ -109,54 +110,53 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
m.complete(p, EXPR_STMT);
}
}
+}
- // test let_stmt
- // fn f() { let x: i32 = 92; }
- fn let_stmt(p: &mut Parser<'_>, m: Marker, with_semi: Semicolon) {
- p.bump(T![let]);
- patterns::pattern(p);
- if p.at(T![:]) {
- // test let_stmt_ascription
- // fn f() { let x: i32; }
- types::ascription(p);
- }
+// test let_stmt
+// fn f() { let x: i32 = 92; }
+pub(super) fn let_stmt(p: &mut Parser<'_>, with_semi: Semicolon) {
+ p.bump(T![let]);
+ patterns::pattern(p);
+ if p.at(T![:]) {
+ // test let_stmt_ascription
+ // fn f() { let x: i32; }
+ types::ascription(p);
+ }
- let mut expr_after_eq: Option<CompletedMarker> = None;
- if p.eat(T![=]) {
- // test let_stmt_init
- // fn f() { let x = 92; }
- expr_after_eq = expressions::expr(p);
- }
+ let mut expr_after_eq: Option<CompletedMarker> = None;
+ if p.eat(T![=]) {
+ // test let_stmt_init
+ // fn f() { let x = 92; }
+ expr_after_eq = expressions::expr(p);
+ }
- if p.at(T![else]) {
- // test_err let_else_right_curly_brace
- // fn func() { let Some(_) = {Some(1)} else { panic!("h") };}
- if let Some(expr) = expr_after_eq {
- if BlockLike::is_blocklike(expr.kind()) {
- p.error(
- "right curly brace `}` before `else` in a `let...else` statement not allowed",
- )
- }
+ if p.at(T![else]) {
+ // test_err let_else_right_curly_brace
+ // fn func() { let Some(_) = {Some(1)} else { panic!("h") };}
+ if let Some(expr) = expr_after_eq {
+ if BlockLike::is_blocklike(expr.kind()) {
+ p.error(
+ "right curly brace `}` before `else` in a `let...else` statement not allowed",
+ )
}
-
- // test let_else
- // fn f() { let Some(x) = opt else { return }; }
- let m = p.start();
- p.bump(T![else]);
- block_expr(p);
- m.complete(p, LET_ELSE);
}
- match with_semi {
- Semicolon::Forbidden => (),
- Semicolon::Optional => {
- p.eat(T![;]);
- }
- Semicolon::Required => {
- p.expect(T![;]);
- }
+ // test let_else
+ // fn f() { let Some(x) = opt else { return }; }
+ let m = p.start();
+ p.bump(T![else]);
+ block_expr(p);
+ m.complete(p, LET_ELSE);
+ }
+
+ match with_semi {
+ Semicolon::Forbidden => (),
+ Semicolon::Optional => {
+ p.eat(T![;]);
+ }
+ Semicolon::Required => {
+ p.expect(T![;]);
}
- m.complete(p, LET_STMT);
}
}
@@ -693,6 +693,17 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// We permit `.. }` on the left-hand side of a destructuring assignment.
if !p.at(T!['}']) {
expr(p);
+
+ if p.at(T![,]) {
+ // test_err comma_after_functional_update_syntax
+ // fn foo() {
+ // S { ..x, };
+ // S { ..x, a: 0 }
+ // }
+
+ // Do not bump, so we can support additional fields after this comma.
+ p.error("cannot use a comma after the base struct");
+ }
}
}
T!['{'] => {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
index 4e850b1f7..34fd3420f 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
@@ -79,6 +79,7 @@ pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool) {
e.complete(p, ERROR);
}
EOF | T!['}'] => p.error("expected an item"),
+ T![let] => error_let_stmt(p, "expected an item"),
_ => p.err_and_bump("expected an item"),
}
}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
index 74eae9151..846da28cb 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/params.rs
@@ -7,6 +7,9 @@ use super::*;
// fn b(x: i32) {}
// fn c(x: i32, ) {}
// fn d(x: i32, y: ()) {}
+
+// test_err empty_param_slot
+// fn f(y: i32, ,t: i32) {}
pub(super) fn param_list_fn_def(p: &mut Parser<'_>) {
list_(p, Flavor::FnDef);
}
@@ -71,7 +74,11 @@ fn list_(p: &mut Parser<'_>, flavor: Flavor) {
if !p.at_ts(PARAM_FIRST.union(ATTRIBUTE_FIRST)) {
p.error("expected value parameter");
m.abandon(p);
- break;
+ if p.eat(T![,]) {
+ continue;
+ } else {
+ break;
+ }
}
param(p, m, flavor);
if !p.at(T![,]) {
diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
index 36c52953a..b9e7566fd 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
@@ -8,8 +8,12 @@
//! Note that these tokens, unlike the tokens we feed into the parser, do
//! include info about comments and whitespace.
+use rustc_dependencies::lexer as rustc_lexer;
+
use std::ops;
+use rustc_lexer::unescape::{EscapeError, Mode};
+
use crate::{
SyntaxKind::{self, *},
T,
@@ -253,30 +257,60 @@ impl<'a> Converter<'a> {
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
err = "Missing trailing `'` symbol to terminate the character literal";
+ } else {
+ let text = &self.res.text[self.offset + 1..][..len - 1];
+ let i = text.rfind('\'').unwrap();
+ let text = &text[..i];
+ if let Err(e) = rustc_lexer::unescape::unescape_char(text) {
+ err = error_to_diagnostic_message(e, Mode::Char);
+ }
}
CHAR
}
rustc_lexer::LiteralKind::Byte { terminated } => {
if !terminated {
err = "Missing trailing `'` symbol to terminate the byte literal";
+ } else {
+ let text = &self.res.text[self.offset + 2..][..len - 2];
+ let i = text.rfind('\'').unwrap();
+ let text = &text[..i];
+ if let Err(e) = rustc_lexer::unescape::unescape_byte(text) {
+ err = error_to_diagnostic_message(e, Mode::Byte);
+ }
}
+
BYTE
}
rustc_lexer::LiteralKind::Str { terminated } => {
if !terminated {
err = "Missing trailing `\"` symbol to terminate the string literal";
+ } else {
+ let text = &self.res.text[self.offset + 1..][..len - 1];
+ let i = text.rfind('"').unwrap();
+ let text = &text[..i];
+ err = unescape_string_error_message(text, Mode::Str);
}
STRING
}
rustc_lexer::LiteralKind::ByteStr { terminated } => {
if !terminated {
err = "Missing trailing `\"` symbol to terminate the byte string literal";
+ } else {
+ let text = &self.res.text[self.offset + 2..][..len - 2];
+ let i = text.rfind('"').unwrap();
+ let text = &text[..i];
+ err = unescape_string_error_message(text, Mode::ByteStr);
}
BYTE_STRING
}
rustc_lexer::LiteralKind::CStr { terminated } => {
if !terminated {
err = "Missing trailing `\"` symbol to terminate the string literal";
+ } else {
+ let text = &self.res.text[self.offset + 2..][..len - 2];
+ let i = text.rfind('"').unwrap();
+ let text = &text[..i];
+ err = unescape_string_error_message(text, Mode::CStr);
}
C_STRING
}
@@ -304,3 +338,64 @@ impl<'a> Converter<'a> {
self.push(syntax_kind, len, err);
}
}
+
+fn error_to_diagnostic_message(error: EscapeError, mode: Mode) -> &'static str {
+ match error {
+ EscapeError::ZeroChars => "empty character literal",
+ EscapeError::MoreThanOneChar => "character literal may only contain one codepoint",
+ EscapeError::LoneSlash => "",
+ EscapeError::InvalidEscape if mode == Mode::Byte || mode == Mode::ByteStr => {
+ "unknown byte escape"
+ }
+ EscapeError::InvalidEscape => "unknown character escape",
+ EscapeError::BareCarriageReturn => "",
+ EscapeError::BareCarriageReturnInRawString => "",
+ EscapeError::EscapeOnlyChar if mode == Mode::Byte => "byte constant must be escaped",
+ EscapeError::EscapeOnlyChar => "character constant must be escaped",
+ EscapeError::TooShortHexEscape => "numeric character escape is too short",
+ EscapeError::InvalidCharInHexEscape => "invalid character in numeric character escape",
+ EscapeError::OutOfRangeHexEscape => "out of range hex escape",
+ EscapeError::NoBraceInUnicodeEscape => "incorrect unicode escape sequence",
+ EscapeError::InvalidCharInUnicodeEscape => "invalid character in unicode escape",
+ EscapeError::EmptyUnicodeEscape => "empty unicode escape",
+ EscapeError::UnclosedUnicodeEscape => "unterminated unicode escape",
+ EscapeError::LeadingUnderscoreUnicodeEscape => "invalid start of unicode escape",
+ EscapeError::OverlongUnicodeEscape => "overlong unicode escape",
+ EscapeError::LoneSurrogateUnicodeEscape => "invalid unicode character escape",
+ EscapeError::OutOfRangeUnicodeEscape => "invalid unicode character escape",
+ EscapeError::UnicodeEscapeInByte => "unicode escape in byte string",
+ EscapeError::NonAsciiCharInByte if mode == Mode::Byte => {
+ "non-ASCII character in byte literal"
+ }
+ EscapeError::NonAsciiCharInByte if mode == Mode::ByteStr => {
+ "non-ASCII character in byte string literal"
+ }
+ EscapeError::NonAsciiCharInByte => "non-ASCII character in raw byte string literal",
+ EscapeError::UnskippedWhitespaceWarning => "",
+ EscapeError::MultipleSkippedLinesWarning => "",
+ }
+}
+
+fn unescape_string_error_message(text: &str, mode: Mode) -> &'static str {
+ let mut error_message = "";
+ match mode {
+ Mode::CStr => {
+ rustc_lexer::unescape::unescape_c_string(text, mode, &mut |_, res| {
+ if let Err(e) = res {
+ error_message = error_to_diagnostic_message(e, mode);
+ }
+ });
+ }
+ Mode::ByteStr | Mode::Str => {
+ rustc_lexer::unescape::unescape_literal(text, mode, &mut |_, res| {
+ if let Err(e) = res {
+ error_message = error_to_diagnostic_message(e, mode);
+ }
+ });
+ }
+ _ => {
+ // Other Modes are not supported yet or do not apply
+ }
+ }
+ error_message
+}
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
index c155e8aaf..d9b3f46f2 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -17,8 +17,9 @@
//!
//! [`Parser`]: crate::parser::Parser
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#![allow(rustdoc::private_intra_doc_links)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
mod lexed_str;
mod token_set;
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
index 2c47e3d08..57005a683 100644
--- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -32,29 +32,27 @@ impl LexedStr<'_> {
let kind = self.kind(i);
if kind.is_trivia() {
was_joint = false
+ } else if kind == SyntaxKind::IDENT {
+ let token_text = self.text(i);
+ let contextual_kw =
+ SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+ res.push_ident(contextual_kw);
} else {
- if kind == SyntaxKind::IDENT {
- let token_text = self.text(i);
- let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
- .unwrap_or(SyntaxKind::IDENT);
- res.push_ident(contextual_kw);
- } else {
- if was_joint {
+ if was_joint {
+ res.was_joint();
+ }
+ res.push(kind);
+ // Tag the token as joint if it is float with a fractional part
+ // we use this jointness to inform the parser about what token split
+ // event to emit when we encounter a float literal in a field access
+ if kind == SyntaxKind::FLOAT_NUMBER {
+ if !self.text(i).ends_with('.') {
res.was_joint();
- }
- res.push(kind);
- // Tag the token as joint if it is float with a fractional part
- // we use this jointness to inform the parser about what token split
- // event to emit when we encounter a float literal in a field access
- if kind == SyntaxKind::FLOAT_NUMBER {
- if !self.text(i).ends_with('.') {
- res.was_joint();
- } else {
- was_joint = false;
- }
} else {
- was_joint = true;
+ was_joint = false;
}
+ } else {
+ was_joint = true;
}
}
}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rast
new file mode 100644
index 000000000..7603c9099
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rast
@@ -0,0 +1,86 @@
+BYTE "b''" error: empty character literal
+WHITESPACE "\n"
+BYTE "b'\\'" error: Missing trailing `'` symbol to terminate the byte literal
+WHITESPACE "\n"
+BYTE "b'\n'" error: byte constant must be escaped
+WHITESPACE "\n"
+BYTE "b'spam'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\x0ff'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\\"a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\na'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\ra'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\ta'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\\\a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\'a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\0a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\u{0}x'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{1F63b}}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\v'" error: unknown byte escape
+WHITESPACE "\n"
+BYTE "b'\\💩'" error: unknown byte escape
+WHITESPACE "\n"
+BYTE "b'\\●'" error: unknown byte escape
+WHITESPACE "\n"
+BYTE "b'\\\\\\r'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+BYTE "b'\\x'" error: numeric character escape is too short
+WHITESPACE "\n"
+BYTE "b'\\x0'" error: numeric character escape is too short
+WHITESPACE "\n"
+BYTE "b'\\xf'" error: numeric character escape is too short
+WHITESPACE "\n"
+BYTE "b'\\xa'" error: numeric character escape is too short
+WHITESPACE "\n"
+BYTE "b'\\xx'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+BYTE "b'\\xы'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+BYTE "b'\\x🦀'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+BYTE "b'\\xtt'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+BYTE "b'\\u'" error: incorrect unicode escape sequence
+WHITESPACE "\n"
+BYTE "b'\\u[0123]'" error: incorrect unicode escape sequence
+WHITESPACE "\n"
+BYTE "b'\\u{0x}'" error: invalid character in unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{'" error: unterminated unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{0000'" error: unterminated unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{}'" error: empty unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{_0000}'" error: invalid start of unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{0000000}'" error: overlong unicode escape
+WHITESPACE "\n"
+BYTE "b'\\u{FFFFFF}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{ffffff}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{ffffff}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DC00}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DDDD}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DFFF}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{D800}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DAAA}'" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE "b'\\u{DBFF}'" error: unicode escape in byte string
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rs
new file mode 100644
index 000000000..b2d06e490
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_char_literals.rs
@@ -0,0 +1,44 @@
+b''
+b'\'
+b'
+'
+b'spam'
+b'\x0ff'
+b'\"a'
+b'\na'
+b'\ra'
+b'\ta'
+b'\\a'
+b'\'a'
+b'\0a'
+b'\u{0}x'
+b'\u{1F63b}}'
+b'\v'
+b'\💩'
+b'\●'
+b'\\\r'
+b'\x'
+b'\x0'
+b'\xf'
+b'\xa'
+b'\xx'
+b'\xы'
+b'\x🦀'
+b'\xtt'
+b'\u'
+b'\u[0123]'
+b'\u{0x}'
+b'\u{'
+b'\u{0000'
+b'\u{}'
+b'\u{_0000}'
+b'\u{0000000}'
+b'\u{FFFFFF}'
+b'\u{ffffff}'
+b'\u{ffffff}'
+b'\u{DC00}'
+b'\u{DDDD}'
+b'\u{DFFF}'
+b'\u{D800}'
+b'\u{DAAA}'
+b'\u{DBFF}'
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rast
new file mode 100644
index 000000000..e8d8ff8ce
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rast
@@ -0,0 +1,28 @@
+BYTE_STRING "b\"\\💩\"" error: unknown byte escape
+WHITESPACE "\n"
+BYTE_STRING "b\"\\●\"" error: unknown byte escape
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{_0000}\"" error: invalid start of unicode escape
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{0000000}\"" error: overlong unicode escape
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{FFFFFF}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{ffffff}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{ffffff}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DC00}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DDDD}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DFFF}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{D800}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DAAA}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\u{DBFF}\"" error: unicode escape in byte string
+WHITESPACE "\n"
+BYTE_STRING "b\"\\xы\"" error: invalid character in numeric character escape
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rs
new file mode 100644
index 000000000..e74847137
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/byte_strings.rs
@@ -0,0 +1,14 @@
+b"\💩"
+b"\●"
+b"\u{_0000}"
+b"\u{0000000}"
+b"\u{FFFFFF}"
+b"\u{ffffff}"
+b"\u{ffffff}"
+b"\u{DC00}"
+b"\u{DDDD}"
+b"\u{DFFF}"
+b"\u{D800}"
+b"\u{DAAA}"
+b"\u{DBFF}"
+b"\xы"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rast
new file mode 100644
index 000000000..1b4424ba5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rast
@@ -0,0 +1,28 @@
+C_STRING "c\"\\💩\"" error: unknown character escape
+WHITESPACE "\n"
+C_STRING "c\"\\●\"" error: unknown character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{_0000}\"" error: invalid start of unicode escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{0000000}\"" error: overlong unicode escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{FFFFFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{ffffff}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{ffffff}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DC00}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DDDD}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DFFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{D800}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DAAA}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\u{DBFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+C_STRING "c\"\\xы\"" error: invalid character in numeric character escape
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rs
new file mode 100644
index 000000000..1b78ffc28
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/c_strings.rs
@@ -0,0 +1,14 @@
+c"\💩"
+c"\●"
+c"\u{_0000}"
+c"\u{0000000}"
+c"\u{FFFFFF}"
+c"\u{ffffff}"
+c"\u{ffffff}"
+c"\u{DC00}"
+c"\u{DDDD}"
+c"\u{DFFF}"
+c"\u{D800}"
+c"\u{DAAA}"
+c"\u{DBFF}"
+c"\xы"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rast
new file mode 100644
index 000000000..b1e1364d4
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rast
@@ -0,0 +1,92 @@
+CHAR "'hello'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "''" error: empty character literal
+WHITESPACE "\n"
+CHAR "'\n'" error: character constant must be escaped
+WHITESPACE "\n"
+CHAR "'spam'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\x0ff'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\\"a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\na'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\ra'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\ta'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\\\a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\'a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\0a'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\u{0}x'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\u{1F63b}}'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\v'" error: unknown character escape
+WHITESPACE "\n"
+CHAR "'\\💩'" error: unknown character escape
+WHITESPACE "\n"
+CHAR "'\\●'" error: unknown character escape
+WHITESPACE "\n"
+CHAR "'\\\\\\r'" error: character literal may only contain one codepoint
+WHITESPACE "\n"
+CHAR "'\\x'" error: numeric character escape is too short
+WHITESPACE "\n"
+CHAR "'\\x0'" error: numeric character escape is too short
+WHITESPACE "\n"
+CHAR "'\\xf'" error: numeric character escape is too short
+WHITESPACE "\n"
+CHAR "'\\xa'" error: numeric character escape is too short
+WHITESPACE "\n"
+CHAR "'\\xx'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+CHAR "'\\xы'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+CHAR "'\\x🦀'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+CHAR "'\\xtt'" error: invalid character in numeric character escape
+WHITESPACE "\n"
+CHAR "'\\xff'" error: out of range hex escape
+WHITESPACE "\n"
+CHAR "'\\xFF'" error: out of range hex escape
+WHITESPACE "\n"
+CHAR "'\\x80'" error: out of range hex escape
+WHITESPACE "\n"
+CHAR "'\\u'" error: incorrect unicode escape sequence
+WHITESPACE "\n"
+CHAR "'\\u[0123]'" error: incorrect unicode escape sequence
+WHITESPACE "\n"
+CHAR "'\\u{0x}'" error: invalid character in unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{'" error: unterminated unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{0000'" error: unterminated unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{}'" error: empty unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{_0000}'" error: invalid start of unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{0000000}'" error: overlong unicode escape
+WHITESPACE "\n"
+CHAR "'\\u{FFFFFF}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{ffffff}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{ffffff}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DC00}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DDDD}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DFFF}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{D800}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DAAA}'" error: invalid unicode character escape
+WHITESPACE "\n"
+CHAR "'\\u{DBFF}'" error: invalid unicode character escape
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rs
new file mode 100644
index 000000000..291f99d80
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/char_literals.rs
@@ -0,0 +1,47 @@
+'hello'
+''
+'
+'
+'spam'
+'\x0ff'
+'\"a'
+'\na'
+'\ra'
+'\ta'
+'\\a'
+'\'a'
+'\0a'
+'\u{0}x'
+'\u{1F63b}}'
+'\v'
+'\💩'
+'\●'
+'\\\r'
+'\x'
+'\x0'
+'\xf'
+'\xa'
+'\xx'
+'\xы'
+'\x🦀'
+'\xtt'
+'\xff'
+'\xFF'
+'\x80'
+'\u'
+'\u[0123]'
+'\u{0x}'
+'\u{'
+'\u{0000'
+'\u{}'
+'\u{_0000}'
+'\u{0000000}'
+'\u{FFFFFF}'
+'\u{ffffff}'
+'\u{ffffff}'
+'\u{DC00}'
+'\u{DDDD}'
+'\u{DFFF}'
+'\u{D800}'
+'\u{DAAA}'
+'\u{DBFF}'
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rast
new file mode 100644
index 000000000..0cd174720
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rast
@@ -0,0 +1,28 @@
+STRING "\"\\💩\"" error: unknown character escape
+WHITESPACE "\n"
+STRING "\"\\●\"" error: unknown character escape
+WHITESPACE "\n"
+STRING "\"\\u{_0000}\"" error: invalid start of unicode escape
+WHITESPACE "\n"
+STRING "\"\\u{0000000}\"" error: overlong unicode escape
+WHITESPACE "\n"
+STRING "\"\\u{FFFFFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{ffffff}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{ffffff}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DC00}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DDDD}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DFFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{D800}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DAAA}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\u{DBFF}\"" error: invalid unicode character escape
+WHITESPACE "\n"
+STRING "\"\\xы\"" error: invalid character in numeric character escape
+WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rs
new file mode 100644
index 000000000..2499516d3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/err/strings.rs
@@ -0,0 +1,14 @@
+"\💩"
+"\●"
+"\u{_0000}"
+"\u{0000000}"
+"\u{FFFFFF}"
+"\u{ffffff}"
+"\u{ffffff}"
+"\u{DC00}"
+"\u{DDDD}"
+"\u{DFFF}"
+"\u{D800}"
+"\u{DAAA}"
+"\u{DBFF}"
+"\xы"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
index c848ac368..fd20ca57a 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rast
@@ -1,13 +1,9 @@
-BYTE "b''"
-WHITESPACE " "
BYTE "b'x'"
WHITESPACE " "
BYTE_STRING "b\"foo\""
WHITESPACE " "
BYTE_STRING "br\"\""
WHITESPACE "\n"
-BYTE "b''suf"
-WHITESPACE " "
BYTE_STRING "b\"\"ix"
WHITESPACE " "
BYTE_STRING "br\"\"br"
@@ -17,6 +13,4 @@ WHITESPACE " "
BYTE "b'\\\\'"
WHITESPACE " "
BYTE "b'\\''"
-WHITESPACE " "
-BYTE "b'hello'"
WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
index b54930f5e..65460d02c 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/byte_strings.rs
@@ -1,3 +1,3 @@
-b'' b'x' b"foo" br""
-b''suf b""ix br""br
-b'\n' b'\\' b'\'' b'hello'
+b'x' b"foo" br""
+b""ix br""br
+b'\n' b'\\' b'\''
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
index 66e58cc29..07172a4ec 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rast
@@ -4,8 +4,6 @@ CHAR "' '"
WHITESPACE " "
CHAR "'0'"
WHITESPACE " "
-CHAR "'hello'"
-WHITESPACE " "
CHAR "'\\x7f'"
WHITESPACE " "
CHAR "'\\n'"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
index 454ee0a5f..15f52c113 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/lexer/ok/chars.rs
@@ -1 +1 @@
-'x' ' ' '0' 'hello' '\x7f' '\n' '\\' '\''
+'x' ' ' '0' '\x7f' '\n' '\\' '\''
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast
new file mode 100644
index 000000000..39e35a81e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rast
@@ -0,0 +1,41 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "y"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ COMMA ","
+ WHITESPACE " "
+ COMMA ","
+ PARAM
+ IDENT_PAT
+ NAME
+ IDENT "t"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 12: expected value parameter
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs
new file mode 100644
index 000000000..0adf7b8d2
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0023_empty_param_slot.rs
@@ -0,0 +1 @@
+fn f(y: i32, ,t: i32) {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast
new file mode 100644
index 000000000..0e2fe5988
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rast
@@ -0,0 +1,66 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ RECORD_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ RECORD_EXPR_FIELD_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ DOT2 ".."
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_EXPR_FIELD
+ NAME_REF
+ IDENT "a"
+ COLON ":"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 22: cannot use a comma after the base struct
+error 38: cannot use a comma after the base struct
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs
new file mode 100644
index 000000000..14cf96719
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_comma_after_functional_update_syntax.rs
@@ -0,0 +1,4 @@
+fn foo() {
+ S { ..x, };
+ S { ..x, a: 0 }
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast
new file mode 100644
index 000000000..5ddef5f3f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rast
@@ -0,0 +1,30 @@
+SOURCE_FILE
+ ERROR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ REF_KW "ref"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ COLON ":"
+ WHITESPACE " "
+ FN_PTR_TYPE
+ FN_KW "fn"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "3"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 0: expected an item
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs
new file mode 100644
index 000000000..3d3e7dd56
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0024_top_level_let.rs
@@ -0,0 +1 @@
+let ref foo: fn() = 1 + 3;
diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs
index 88b8d0aee..db705a7b6 100644
--- a/src/tools/rust-analyzer/crates/paths/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs
@@ -1,7 +1,7 @@
//! Thin wrappers around `std::path`, distinguishing between absolute and
//! relative paths.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{
borrow::Borrow,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
index 4229f2891..2cbbc9489 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -21,15 +21,19 @@ object = { version = "0.32.0", default-features = false, features = [
] }
serde.workspace = true
serde_json = { workspace = true, features = ["unbounded_depth"] }
-tracing = "0.1.37"
+tracing.workspace = true
triomphe.workspace = true
memmap2 = "0.5.4"
snap = "1.1.0"
+indexmap = "2.1.0"
# local deps
paths.workspace = true
tt.workspace = true
stdx.workspace = true
profile.workspace = true
-# Intentionally *not* depend on anything salsa-related
-# base-db.workspace = true
+text-size.workspace = true
+# Ideally this crate would not depend on salsa things, but we need span information here which wraps
+# InternIds for the syntax context
+base-db.workspace = true
+la-arena.workspace = true
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 1603458f7..f697ecd35 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -5,22 +5,22 @@
//! is used to provide basic infrastructure for communication between two
//! processes: Client (RA itself), Server (the external program)
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod msg;
mod process;
mod version;
+use base_db::span::SpanData;
+use indexmap::IndexSet;
use paths::AbsPathBuf;
use std::{fmt, io, sync::Mutex};
use triomphe::Arc;
use serde::{Deserialize, Serialize};
-use ::tt::token_id as tt;
-
use crate::{
- msg::{ExpandMacro, FlatTree, PanicMessage},
+ msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS},
process::ProcMacroProcessSrv,
};
@@ -136,30 +136,47 @@ impl ProcMacro {
pub fn expand(
&self,
- subtree: &tt::Subtree,
- attr: Option<&tt::Subtree>,
+ subtree: &tt::Subtree<SpanData>,
+ attr: Option<&tt::Subtree<SpanData>>,
env: Vec<(String, String)>,
- ) -> Result<Result<tt::Subtree, PanicMessage>, ServerError> {
+ def_site: SpanData,
+ call_site: SpanData,
+ mixed_site: SpanData,
+ ) -> Result<Result<tt::Subtree<SpanData>, PanicMessage>, ServerError> {
let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version();
let current_dir = env
.iter()
.find(|(name, _)| name == "CARGO_MANIFEST_DIR")
.map(|(_, value)| value.clone());
+ let mut span_data_table = IndexSet::default();
+ let def_site = span_data_table.insert_full(def_site).0;
+ let call_site = span_data_table.insert_full(call_site).0;
+ let mixed_site = span_data_table.insert_full(mixed_site).0;
let task = ExpandMacro {
- macro_body: FlatTree::new(subtree, version),
+ macro_body: FlatTree::new(subtree, version, &mut span_data_table),
macro_name: self.name.to_string(),
- attributes: attr.map(|subtree| FlatTree::new(subtree, version)),
+ attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
lib: self.dylib_path.to_path_buf().into(),
env,
current_dir,
+ has_global_spans: ExpnGlobals {
+ serialize: version >= HAS_GLOBAL_SPANS,
+ def_site,
+ call_site,
+ mixed_site,
+ },
};
- let request = msg::Request::ExpandMacro(task);
- let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
+ let response = self
+ .process
+ .lock()
+ .unwrap_or_else(|e| e.into_inner())
+ .send_task(msg::Request::ExpandMacro(task))?;
+
match response {
msg::Response::ExpandMacro(it) => {
- Ok(it.map(|tree| FlatTree::to_subtree(tree, version)))
+ Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table)))
}
msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
Err(ServerError { message: "unexpected response".to_string(), io: None })
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
index 4b01643c2..1d3e45aff 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
@@ -10,14 +10,15 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::ProcMacroKind;
-pub use crate::msg::flat::FlatTree;
+pub use crate::msg::flat::{FlatTree, TokenId};
// The versions of the server protocol
pub const NO_VERSION_CHECK_VERSION: u32 = 0;
pub const VERSION_CHECK_VERSION: u32 = 1;
pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
+pub const HAS_GLOBAL_SPANS: u32 = 3;
-pub const CURRENT_API_VERSION: u32 = ENCODE_CLOSE_SPAN_VERSION;
+pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS;
#[derive(Debug, Serialize, Deserialize)]
pub enum Request {
@@ -59,6 +60,26 @@ pub struct ExpandMacro {
pub env: Vec<(String, String)>,
pub current_dir: Option<String>,
+ /// marker for serde skip stuff
+ #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
+ #[serde(default)]
+ pub has_global_spans: ExpnGlobals,
+}
+
+#[derive(Default, Debug, Serialize, Deserialize)]
+pub struct ExpnGlobals {
+ #[serde(skip_serializing)]
+ #[serde(default)]
+ pub serialize: bool,
+ pub def_site: usize,
+ pub call_site: usize,
+ pub mixed_site: usize,
+}
+
+impl ExpnGlobals {
+ fn skip_serializing_if(&self) -> bool {
+ !self.serialize
+ }
}
pub trait Message: Serialize + DeserializeOwned {
@@ -115,30 +136,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
#[cfg(test)]
mod tests {
+ use base_db::{
+ span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+ FileId,
+ };
+ use la_arena::RawIdx;
+ use text_size::{TextRange, TextSize};
+ use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};
+
use super::*;
- use crate::tt::*;
- fn fixture_token_tree() -> Subtree {
- let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() };
- subtree
- .token_trees
- .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into()));
- subtree
- .token_trees
- .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into()));
+ fn fixture_token_tree() -> Subtree<SpanData> {
+ let anchor = SpanAnchor {
+ file_id: FileId::from_raw(0),
+ ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)),
+ };
+ let mut subtree = Subtree {
+ delimiter: Delimiter {
+ open: SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ close: SpanData {
+ range: TextRange::empty(TextSize::new(13)),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ kind: DelimiterKind::Invisible,
+ },
+ token_trees: Vec::new(),
+ };
+ subtree.token_trees.push(TokenTree::Leaf(
+ Ident {
+ text: "struct".into(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(0), TextSize::of("struct")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .into(),
+ ));
+ subtree.token_trees.push(TokenTree::Leaf(
+ Ident {
+ text: "Foo".into(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(5), TextSize::of("Foo")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ }
+ .into(),
+ ));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
text: "Foo".into(),
- span: TokenId::unspecified(),
+
+ span: SpanData {
+ range: TextRange::at(TextSize::new(8), TextSize::of("Foo")),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
})));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
char: '@',
- span: TokenId::unspecified(),
+ span: SpanData {
+ range: TextRange::at(TextSize::new(11), TextSize::of('@')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
spacing: Spacing::Joint,
})));
subtree.token_trees.push(TokenTree::Subtree(Subtree {
delimiter: Delimiter {
- open: TokenId(2),
- close: TokenId::UNSPECIFIED,
+ open: SpanData {
+ range: TextRange::at(TextSize::new(12), TextSize::of('{')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
+ close: SpanData {
+ range: TextRange::at(TextSize::new(13), TextSize::of('}')),
+ anchor,
+ ctx: SyntaxContextId::ROOT,
+ },
kind: DelimiterKind::Brace,
},
token_trees: vec![],
@@ -149,19 +229,26 @@ mod tests {
#[test]
fn test_proc_macro_rpc_works() {
let tt = fixture_token_tree();
+ let mut span_data_table = Default::default();
let task = ExpandMacro {
- macro_body: FlatTree::new(&tt, CURRENT_API_VERSION),
+ macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
lib: std::env::current_dir().unwrap(),
env: Default::default(),
current_dir: Default::default(),
+ has_global_spans: ExpnGlobals {
+ serialize: true,
+ def_site: 0,
+ call_site: 0,
+ mixed_site: 0,
+ },
};
let json = serde_json::to_string(&task).unwrap();
// println!("{}", json);
let back: ExpandMacro = serde_json::from_str(&json).unwrap();
- assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION));
+ assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table));
}
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
index 44245336f..583571862 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
@@ -37,12 +37,26 @@
use std::collections::{HashMap, VecDeque};
+use base_db::span::SpanData;
+use indexmap::IndexSet;
use serde::{Deserialize, Serialize};
-use crate::{
- msg::ENCODE_CLOSE_SPAN_VERSION,
- tt::{self, TokenId},
-};
+use crate::msg::ENCODE_CLOSE_SPAN_VERSION;
+
+type SpanDataIndexMap = IndexSet<SpanData>;
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(pub u32);
+
+impl std::fmt::Debug for TokenId {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl tt::Span for TokenId {
+ const DUMMY: Self = TokenId(!0);
+}
#[derive(Serialize, Deserialize, Debug)]
pub struct FlatTree {
@@ -55,33 +69,38 @@ pub struct FlatTree {
}
struct SubtreeRepr {
- open: tt::TokenId,
- close: tt::TokenId,
+ open: TokenId,
+ close: TokenId,
kind: tt::DelimiterKind,
tt: [u32; 2],
}
struct LiteralRepr {
- id: tt::TokenId,
+ id: TokenId,
text: u32,
}
struct PunctRepr {
- id: tt::TokenId,
+ id: TokenId,
char: char,
spacing: tt::Spacing,
}
struct IdentRepr {
- id: tt::TokenId,
+ id: TokenId,
text: u32,
}
impl FlatTree {
- pub fn new(subtree: &tt::Subtree, version: u32) -> FlatTree {
+ pub fn new(
+ subtree: &tt::Subtree<SpanData>,
+ version: u32,
+ span_data_table: &mut SpanDataIndexMap,
+ ) -> FlatTree {
let mut w = Writer {
string_table: HashMap::new(),
work: VecDeque::new(),
+ span_data_table,
subtree: Vec::new(),
literal: Vec::new(),
@@ -92,7 +111,7 @@ impl FlatTree {
};
w.write(subtree);
- return FlatTree {
+ FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
write_vec(w.subtree, SubtreeRepr::write_with_close_span)
} else {
@@ -103,15 +122,44 @@ impl FlatTree {
ident: write_vec(w.ident, IdentRepr::write),
token_tree: w.token_tree,
text: w.text,
+ }
+ }
+
+ pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
+ let mut w = Writer {
+ string_table: HashMap::new(),
+ work: VecDeque::new(),
+ span_data_table: &mut (),
+
+ subtree: Vec::new(),
+ literal: Vec::new(),
+ punct: Vec::new(),
+ ident: Vec::new(),
+ token_tree: Vec::new(),
+ text: Vec::new(),
};
+ w.write(subtree);
- fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
- xs.into_iter().flat_map(f).collect()
+ FlatTree {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
+ literal: write_vec(w.literal, LiteralRepr::write),
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: write_vec(w.ident, IdentRepr::write),
+ token_tree: w.token_tree,
+ text: w.text,
}
}
- pub fn to_subtree(self, version: u32) -> tt::Subtree {
- return Reader {
+ pub fn to_subtree_resolved(
+ self,
+ version: u32,
+ span_data_table: &SpanDataIndexMap,
+ ) -> tt::Subtree<SpanData> {
+ Reader {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
} else {
@@ -122,18 +170,40 @@ impl FlatTree {
ident: read_vec(self.ident, IdentRepr::read),
token_tree: self.token_tree,
text: self.text,
+ span_data_table,
}
- .read();
+ .read()
+ }
- fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
- let mut chunks = xs.chunks_exact(N);
- let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
- assert!(chunks.remainder().is_empty());
- res
+ pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> {
+ Reader {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
+ } else {
+ read_vec(self.subtree, SubtreeRepr::read)
+ },
+ literal: read_vec(self.literal, LiteralRepr::read),
+ punct: read_vec(self.punct, PunctRepr::read),
+ ident: read_vec(self.ident, IdentRepr::read),
+ token_tree: self.token_tree,
+ text: self.text,
+ span_data_table: &(),
}
+ .read()
}
}
+fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
+ let mut chunks = xs.chunks_exact(N);
+ let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
+ assert!(chunks.remainder().is_empty());
+ res
+}
+
+fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
+ xs.into_iter().flat_map(f).collect()
+}
+
impl SubtreeRepr {
fn write(self) -> [u32; 4] {
let kind = match self.kind {
@@ -152,7 +222,7 @@ impl SubtreeRepr {
3 => tt::DelimiterKind::Bracket,
other => panic!("bad kind {other}"),
};
- SubtreeRepr { open: TokenId(open), close: TokenId::UNSPECIFIED, kind, tt: [lo, len] }
+ SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt: [lo, len] }
}
fn write_with_close_span(self) -> [u32; 5] {
let kind = match self.kind {
@@ -211,9 +281,36 @@ impl IdentRepr {
}
}
-struct Writer<'a> {
- work: VecDeque<(usize, &'a tt::Subtree)>,
+trait Span: Copy {
+ type Table;
+ fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId;
+ fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self;
+}
+
+impl Span for TokenId {
+ type Table = ();
+ fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId {
+ token_id
+ }
+
+ fn span_for_token_id((): &Self::Table, id: TokenId) -> Self {
+ id
+ }
+}
+impl Span for SpanData {
+ type Table = IndexSet<SpanData>;
+ fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId {
+ TokenId(table.insert_full(span).0 as u32)
+ }
+ fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self {
+ *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0])
+ }
+}
+
+struct Writer<'a, 'span, S: Span> {
+ work: VecDeque<(usize, &'a tt::Subtree<S>)>,
string_table: HashMap<&'a str, u32>,
+ span_data_table: &'span mut S::Table,
subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>,
@@ -223,15 +320,19 @@ struct Writer<'a> {
text: Vec<String>,
}
-impl<'a> Writer<'a> {
- fn write(&mut self, root: &'a tt::Subtree) {
+impl<'a, 'span, S: Span> Writer<'a, 'span, S> {
+ fn write(&mut self, root: &'a tt::Subtree<S>) {
self.enqueue(root);
while let Some((idx, subtree)) = self.work.pop_front() {
self.subtree(idx, subtree);
}
}
- fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) {
+ fn token_id_of(&mut self, span: S) -> TokenId {
+ S::token_id_of(self.span_data_table, span)
+ }
+
+ fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) {
let mut first_tt = self.token_tree.len();
let n_tt = subtree.token_trees.len();
self.token_tree.resize(first_tt + n_tt, !0);
@@ -248,22 +349,21 @@ impl<'a> Writer<'a> {
tt::Leaf::Literal(lit) => {
let idx = self.literal.len() as u32;
let text = self.intern(&lit.text);
- self.literal.push(LiteralRepr { id: lit.span, text });
+ let id = self.token_id_of(lit.span);
+ self.literal.push(LiteralRepr { id, text });
idx << 2 | 0b01
}
tt::Leaf::Punct(punct) => {
let idx = self.punct.len() as u32;
- self.punct.push(PunctRepr {
- char: punct.char,
- spacing: punct.spacing,
- id: punct.span,
- });
+ let id = self.token_id_of(punct.span);
+ self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id });
idx << 2 | 0b10
}
tt::Leaf::Ident(ident) => {
let idx = self.ident.len() as u32;
let text = self.intern(&ident.text);
- self.ident.push(IdentRepr { id: ident.span, text });
+ let id = self.token_id_of(ident.span);
+ self.ident.push(IdentRepr { id, text });
idx << 2 | 0b11
}
},
@@ -273,10 +373,10 @@ impl<'a> Writer<'a> {
}
}
- fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
+ fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 {
let idx = self.subtree.len();
- let open = subtree.delimiter.open;
- let close = subtree.delimiter.close;
+ let open = self.token_id_of(subtree.delimiter.open);
+ let close = self.token_id_of(subtree.delimiter.close);
let delimiter_kind = subtree.delimiter.kind;
self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
self.work.push_back((idx, subtree));
@@ -293,23 +393,29 @@ impl<'a> Writer<'a> {
}
}
-struct Reader {
+struct Reader<'span, S: Span> {
subtree: Vec<SubtreeRepr>,
literal: Vec<LiteralRepr>,
punct: Vec<PunctRepr>,
ident: Vec<IdentRepr>,
token_tree: Vec<u32>,
text: Vec<String>,
+ span_data_table: &'span S::Table,
}
-impl Reader {
- pub(crate) fn read(self) -> tt::Subtree {
- let mut res: Vec<Option<tt::Subtree>> = vec![None; self.subtree.len()];
+impl<'span, S: Span> Reader<'span, S> {
+ pub(crate) fn read(self) -> tt::Subtree<S> {
+ let mut res: Vec<Option<tt::Subtree<S>>> = vec![None; self.subtree.len()];
+ let read_span = |id| S::span_for_token_id(self.span_data_table, id);
for i in (0..self.subtree.len()).rev() {
let repr = &self.subtree[i];
let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
let s = tt::Subtree {
- delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind },
+ delimiter: tt::Delimiter {
+ open: read_span(repr.open),
+ close: read_span(repr.close),
+ kind: repr.kind,
+ },
token_trees: token_trees
.iter()
.copied()
@@ -324,7 +430,7 @@ impl Reader {
let repr = &self.literal[idx];
tt::Leaf::Literal(tt::Literal {
text: self.text[repr.text as usize].as_str().into(),
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
@@ -333,7 +439,7 @@ impl Reader {
tt::Leaf::Punct(tt::Punct {
char: repr.char,
spacing: repr.spacing,
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
@@ -341,7 +447,7 @@ impl Reader {
let repr = &self.ident[idx];
tt::Leaf::Ident(tt::Ident {
text: self.text[repr.text as usize].as_str().into(),
- span: repr.id,
+ span: read_span(repr.id),
})
.into()
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
index 48efbf589..5ff1f36c5 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
@@ -85,8 +85,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
}
/// Check the version of rustc that was used to compile a proc macro crate's
-///
/// binary file.
+///
/// A proc macro crate binary's ".rustc" section has following byte layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes
/// * ff060000 734e6150 is followed, it's the snappy format magic bytes,
@@ -96,8 +96,8 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
/// The bytes you get after decompressing the snappy format portion has
/// following layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes(again)
-/// * [crate root bytes] next 4 bytes is to store crate root position,
-/// according to rustc's source code comment
+/// * [crate root bytes] next 8 bytes (4 in old versions) is to store
+/// crate root position, according to rustc's source code comment
/// * [length byte] next 1 byte tells us how many bytes we should read next
/// for the version string's utf8 bytes
/// * [version string bytes encoded in utf8] <- GET THIS BOI
@@ -119,13 +119,18 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
}
let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
// Last supported version is:
- // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632
- let snappy_portion = match version {
- 5 | 6 => &dot_rustc[8..],
+ // https://github.com/rust-lang/rust/commit/b94cfefc860715fb2adf72a6955423d384c69318
+ let (snappy_portion, bytes_before_version) = match version {
+ 5 | 6 => (&dot_rustc[8..], 13),
7 | 8 => {
let len_bytes = &dot_rustc[8..12];
let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize;
- &dot_rustc[12..data_len + 12]
+ (&dot_rustc[12..data_len + 12], 13)
+ }
+ 9 => {
+ let len_bytes = &dot_rustc[8..16];
+ let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
+ (&dot_rustc[16..data_len + 12], 17)
}
_ => {
return Err(io::Error::new(
@@ -142,15 +147,15 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
Box::new(SnapDecoder::new(snappy_portion))
};
- // the bytes before version string bytes, so this basically is:
+ // We're going to skip over the bytes before the version string, so basically:
// 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
- // 4 bytes for [crate root bytes]
+ // 4 or 8 bytes for [crate root bytes]
// 1 byte for length of version string
- // so 13 bytes in total, and we should check the 13th byte
+ // so 13 or 17 bytes in total, and we should check the last of those bytes
// to know the length
- let mut bytes_before_version = [0u8; 13];
- uncompressed.read_exact(&mut bytes_before_version)?;
- let length = bytes_before_version[12];
+ let mut bytes = [0u8; 17];
+ uncompressed.read_exact(&mut bytes[..bytes_before_version])?;
+ let length = bytes[bytes_before_version - 1];
let mut version_string_utf8 = vec![0u8; length as usize];
uncompressed.read_exact(&mut version_string_utf8)?;
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
index bece19518..50ce586fc 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/src/main.rs
@@ -18,12 +18,12 @@ fn main() -> std::io::Result<()> {
run()
}
-#[cfg(not(feature = "sysroot-abi"))]
+#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
fn run() -> io::Result<()> {
panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled");
}
-#[cfg(feature = "sysroot-abi")]
+#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
fn run() -> io::Result<()> {
use proc_macro_api::msg::{self, Message};
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
index dd05e250c..f20e6832f 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
@@ -11,7 +11,7 @@ use libloading::Library;
use memmap2::Mmap;
use object::Object;
use paths::AbsPath;
-use proc_macro_api::{read_dylib_info, ProcMacroKind};
+use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind};
const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
@@ -152,9 +152,15 @@ impl Expander {
macro_name: &str,
macro_body: &crate::tt::Subtree,
attributes: Option<&crate::tt::Subtree>,
+ def_site: TokenId,
+ call_site: TokenId,
+ mixed_site: TokenId,
) -> Result<crate::tt::Subtree, String> {
- let result = self.inner.proc_macros.expand(macro_name, macro_body, attributes);
- result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
+ let result = self
+ .inner
+ .proc_macros
+ .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site);
+ result.map_err(|e| e.into_string().unwrap_or_default())
}
pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
index 84bd15efb..56529f71d 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -10,10 +10,10 @@
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
-#![cfg(feature = "sysroot-abi")]
+#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
-#![allow(unreachable_pub)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(unreachable_pub, internal_features)]
extern crate proc_macro;
@@ -32,11 +32,23 @@ use std::{
};
use proc_macro_api::{
- msg::{self, CURRENT_API_VERSION},
+ msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION},
ProcMacroKind,
};
-use ::tt::token_id as tt;
+mod tt {
+ pub use proc_macro_api::msg::TokenId;
+
+ pub use ::tt::*;
+
+ pub type Subtree = ::tt::Subtree<TokenId>;
+ pub type TokenTree = ::tt::TokenTree<TokenId>;
+ pub type Delimiter = ::tt::Delimiter<TokenId>;
+ pub type Leaf = ::tt::Leaf<TokenId>;
+ pub type Literal = ::tt::Literal<TokenId>;
+ pub type Punct = ::tt::Punct<TokenId>;
+ pub type Ident = ::tt::Ident<TokenId>;
+}
// see `build.rs`
include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
@@ -70,16 +82,28 @@ impl ProcMacroSrv {
None => None,
};
- let macro_body = task.macro_body.to_subtree(CURRENT_API_VERSION);
- let attributes = task.attributes.map(|it| it.to_subtree(CURRENT_API_VERSION));
+ let ExpnGlobals { def_site, call_site, mixed_site, .. } = task.has_global_spans;
+ let def_site = TokenId(def_site as u32);
+ let call_site = TokenId(call_site as u32);
+ let mixed_site = TokenId(mixed_site as u32);
+
+ let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION);
+ let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION));
let result = thread::scope(|s| {
let thread = thread::Builder::new()
.stack_size(EXPANDER_STACK_SIZE)
.name(task.macro_name.clone())
.spawn_scoped(s, || {
expander
- .expand(&task.macro_name, &macro_body, attributes.as_ref())
- .map(|it| msg::FlatTree::new(&it, CURRENT_API_VERSION))
+ .expand(
+ &task.macro_name,
+ &macro_body,
+ attributes.as_ref(),
+ def_site,
+ call_site,
+ mixed_site,
+ )
+ .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION))
});
let res = match thread {
Ok(handle) => handle.join(),
@@ -136,8 +160,8 @@ pub struct PanicMessage {
}
impl PanicMessage {
- pub fn as_str(&self) -> Option<String> {
- self.message.clone()
+ pub fn into_string(self) -> Option<String> {
+ self.message
}
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
index 3c6f32033..716b85d09 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/proc_macros.rs
@@ -1,16 +1,17 @@
//! Proc macro ABI
use libloading::Library;
-use proc_macro_api::{ProcMacroKind, RustCInfo};
+use proc_macro::bridge;
+use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo};
use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt};
pub(crate) struct ProcMacros {
- exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+ exported_macros: Vec<bridge::client::ProcMacro>,
}
-impl From<proc_macro::bridge::PanicMessage> for crate::PanicMessage {
- fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+impl From<bridge::PanicMessage> for crate::PanicMessage {
+ fn from(p: bridge::PanicMessage) -> Self {
Self { message: p.as_str().map(|s| s.to_string()) }
}
}
@@ -31,9 +32,8 @@ impl ProcMacros {
info: RustCInfo,
) -> Result<ProcMacros, LoadProcMacroDylibError> {
if info.version_string == crate::RUSTC_VERSION_STRING {
- let macros = unsafe {
- lib.get::<&&[proc_macro::bridge::client::ProcMacro]>(symbol_name.as_bytes())
- }?;
+ let macros =
+ unsafe { lib.get::<&&[bridge::client::ProcMacro]>(symbol_name.as_bytes()) }?;
return Ok(Self { exported_macros: macros.to_vec() });
}
@@ -45,6 +45,9 @@ impl ProcMacros {
macro_name: &str,
macro_body: &tt::Subtree,
attributes: Option<&tt::Subtree>,
+ def_site: TokenId,
+ call_site: TokenId,
+ mixed_site: TokenId,
) -> Result<tt::Subtree, crate::PanicMessage> {
let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone());
@@ -54,58 +57,76 @@ impl ProcMacros {
for proc_macro in &self.exported_macros {
match proc_macro {
- proc_macro::bridge::client::ProcMacro::CustomDerive {
- trait_name, client, ..
- } if *trait_name == macro_name => {
+ bridge::client::ProcMacro::CustomDerive { trait_name, client, .. }
+ if *trait_name == macro_name =>
+ {
let res = client.run(
- &proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ &bridge::server::SameThread,
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_body,
- true,
+ false,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
- proc_macro::bridge::client::ProcMacro::Bang { name, client }
- if *name == macro_name =>
- {
+ bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
- &proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ &bridge::server::SameThread,
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_body,
- true,
+ false,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
- proc_macro::bridge::client::ProcMacro::Attr { name, client }
- if *name == macro_name =>
- {
+ bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
- &proc_macro::bridge::server::SameThread,
- crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER },
+ &bridge::server::SameThread,
+ crate::server::RustAnalyzer {
+ interner: &SYMBOL_INTERNER,
+
+ call_site,
+ def_site,
+ mixed_site,
+ },
parsed_attributes,
parsed_body,
- true,
+ false,
);
- return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from);
+ return res
+ .map(|it| it.into_subtree(call_site))
+ .map_err(crate::PanicMessage::from);
}
_ => continue,
}
}
- Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+ Err(bridge::PanicMessage::String("Nothing to expand".to_string()).into())
}
pub(crate) fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
self.exported_macros
.iter()
.map(|proc_macro| match proc_macro {
- proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+ bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
(trait_name.to_string(), ProcMacroKind::CustomDerive)
}
- proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+ bridge::client::ProcMacro::Bang { name, .. } => {
(name.to_string(), ProcMacroKind::FuncLike)
}
- proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+ bridge::client::ProcMacro::Attr { name, .. } => {
(name.to_string(), ProcMacroKind::Attr)
}
})
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
index fe18451d3..917d8a6e2 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
@@ -11,6 +11,7 @@
use proc_macro::bridge::{self, server};
mod token_stream;
+use proc_macro_api::msg::TokenId;
pub use token_stream::TokenStream;
use token_stream::TokenStreamBuilder;
@@ -43,6 +44,9 @@ pub struct FreeFunctions;
pub struct RustAnalyzer {
// FIXME: store span information here.
pub(crate) interner: SymbolInternerRef,
+ pub call_site: TokenId,
+ pub def_site: TokenId,
+ pub mixed_site: TokenId,
}
impl server::Types for RustAnalyzer {
@@ -54,6 +58,10 @@ impl server::Types for RustAnalyzer {
}
impl server::FreeFunctions for RustAnalyzer {
+ fn injected_env_var(&mut self, _var: &str) -> Option<String> {
+ None
+ }
+
fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
// FIXME: track env var accesses
// https://github.com/rust-lang/rust/pull/71858
@@ -69,7 +77,7 @@ impl server::FreeFunctions for RustAnalyzer {
kind: bridge::LitKind::Err,
symbol: Symbol::intern(self.interner, s),
suffix: None,
- span: tt::TokenId::unspecified(),
+ span: self.call_site,
})
}
@@ -83,7 +91,7 @@ impl server::TokenStream for RustAnalyzer {
stream.is_empty()
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
- src.parse().expect("cannot parse string")
+ Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string")
}
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
@@ -280,7 +288,7 @@ impl server::Span for RustAnalyzer {
}
fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
// FIXME stub
- tt::TokenId::unspecified()
+ self.call_site
}
/// Recent feature, not yet in the proc_macro
///
@@ -317,15 +325,15 @@ impl server::Span for RustAnalyzer {
}
fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
// FIXME handle span
- tt::TokenId::unspecified()
+ self.call_site
}
fn end(&mut self, _self_: Self::Span) -> Self::Span {
- tt::TokenId::unspecified()
+ self.call_site
}
fn start(&mut self, _self_: Self::Span) -> Self::Span {
- tt::TokenId::unspecified()
+ self.call_site
}
fn line(&mut self, _span: Self::Span) -> usize {
@@ -349,9 +357,9 @@ impl server::Symbol for RustAnalyzer {
impl server::Server for RustAnalyzer {
fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
bridge::ExpnGlobals {
- def_site: Span::unspecified(),
- call_site: Span::unspecified(),
- mixed_site: Span::unspecified(),
+ def_site: self.def_site,
+ call_site: self.call_site,
+ mixed_site: self.mixed_site,
}
}
@@ -430,16 +438,16 @@ mod tests {
token_trees: vec![
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "struct".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId(0),
})),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "T".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId(0),
})),
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
+ open: tt::TokenId(0),
+ close: tt::TokenId(0),
kind: tt::DelimiterKind::Brace,
},
token_trees: vec![],
@@ -452,33 +460,32 @@ mod tests {
#[test]
fn test_ra_server_from_str() {
- use std::str::FromStr;
let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId::unspecified(),
- close: tt::TokenId::unspecified(),
+ open: tt::TokenId(0),
+ close: tt::TokenId(0),
kind: tt::DelimiterKind::Parenthesis,
},
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "a".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId(0),
}))],
});
- let t1 = TokenStream::from_str("(a)").unwrap();
+ let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
assert_eq!(t1.token_trees.len(), 1);
assert_eq!(t1.token_trees[0], subtree_paren_a);
- let t2 = TokenStream::from_str("(a);").unwrap();
+ let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
assert_eq!(t2.token_trees.len(), 2);
assert_eq!(t2.token_trees[0], subtree_paren_a);
- let underscore = TokenStream::from_str("_").unwrap();
+ let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
assert_eq!(
underscore.token_trees[0],
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
text: "_".into(),
- span: tt::TokenId::unspecified(),
+ span: tt::TokenId(0),
}))
);
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs
index 2589d8b64..36be88250 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server/token_stream.rs
@@ -1,5 +1,7 @@
//! TokenStream implementation used by sysroot ABI
+use proc_macro_api::msg::TokenId;
+
use crate::tt::{self, TokenTree};
#[derive(Debug, Default, Clone)]
@@ -20,8 +22,15 @@ impl TokenStream {
}
}
- pub(crate) fn into_subtree(self) -> tt::Subtree {
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
+ pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree {
+ tt::Subtree {
+ delimiter: tt::Delimiter {
+ open: call_site,
+ close: call_site,
+ kind: tt::DelimiterKind::Invisible,
+ },
+ token_trees: self.token_trees,
+ }
}
pub(super) fn is_empty(&self) -> bool {
@@ -84,7 +93,7 @@ pub(super) struct TokenStreamBuilder {
/// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
pub(super) mod token_stream {
- use std::str::FromStr;
+ use proc_macro_api::msg::TokenId;
use super::{tt, TokenStream, TokenTree};
@@ -109,14 +118,15 @@ pub(super) mod token_stream {
///
/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
/// change these errors into `LexError`s later.
- impl FromStr for TokenStream {
- type Err = LexError;
+ #[rustfmt::skip]
+ impl /*FromStr for*/ TokenStream {
+ // type Err = LexError;
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- let (subtree, _token_map) =
- mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+ pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result<TokenStream, LexError> {
+ let subtree =
+ mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
- let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+ let subtree = subtree_replace_token_ids_with_call_site(subtree,call_site);
Ok(TokenStream::with_subtree(subtree))
}
}
@@ -127,43 +137,39 @@ pub(super) mod token_stream {
}
}
- fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+ fn subtree_replace_token_ids_with_call_site(
+ subtree: tt::Subtree,
+ call_site: TokenId,
+ ) -> tt::Subtree {
tt::Subtree {
- delimiter: tt::Delimiter {
- open: tt::TokenId::UNSPECIFIED,
- close: tt::TokenId::UNSPECIFIED,
- ..subtree.delimiter
- },
+ delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter },
token_trees: subtree
.token_trees
.into_iter()
- .map(token_tree_replace_token_ids_with_unspecified)
+ .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site))
.collect(),
}
}
- fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+ fn token_tree_replace_token_ids_with_call_site(
+ tt: tt::TokenTree,
+ call_site: TokenId,
+ ) -> tt::TokenTree {
match tt {
tt::TokenTree::Leaf(leaf) => {
- tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+ tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site))
}
tt::TokenTree::Subtree(subtree) => {
- tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+ tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site))
}
}
}
- fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+ fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf {
match leaf {
- tt::Leaf::Literal(lit) => {
- tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
- }
- tt::Leaf::Punct(punct) => {
- tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
- }
- tt::Leaf::Ident(ident) => {
- tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
- }
+ tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }),
+ tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }),
+ tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
index 04a0ae7bc..b04e3ca19 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
@@ -8,7 +8,7 @@ use expect_test::expect;
#[test]
fn test_derive_empty() {
- assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 4294967295 4294967295"]);
+ assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"]);
}
#[test]
@@ -17,12 +17,12 @@ fn test_derive_error() {
"DeriveError",
r#"struct S;"#,
expect![[r##"
- SUBTREE $$ 4294967295 4294967295
- IDENT compile_error 4294967295
- PUNCH ! [alone] 4294967295
- SUBTREE () 4294967295 4294967295
- LITERAL "#[derive(DeriveError)] struct S ;" 4294967295
- PUNCH ; [alone] 4294967295"##]],
+ SUBTREE $$ 1 1
+ IDENT compile_error 1
+ PUNCH ! [alone] 1
+ SUBTREE () 1 1
+ LITERAL "#[derive(DeriveError)] struct S ;" 1
+ PUNCH ; [alone] 1"##]],
);
}
@@ -32,14 +32,14 @@ fn test_fn_like_macro_noop() {
"fn_like_noop",
r#"ident, 0, 1, []"#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- IDENT ident 4294967295
- PUNCH , [alone] 4294967295
- LITERAL 0 4294967295
- PUNCH , [alone] 4294967295
- LITERAL 1 4294967295
- PUNCH , [alone] 4294967295
- SUBTREE [] 4294967295 4294967295"#]],
+ SUBTREE $$ 1 1
+ IDENT ident 1
+ PUNCH , [alone] 1
+ LITERAL 0 1
+ PUNCH , [alone] 1
+ LITERAL 1 1
+ PUNCH , [alone] 1
+ SUBTREE [] 1 1"#]],
);
}
@@ -49,10 +49,10 @@ fn test_fn_like_macro_clone_ident_subtree() {
"fn_like_clone_tokens",
r#"ident, []"#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- IDENT ident 4294967295
- PUNCH , [alone] 4294967295
- SUBTREE [] 4294967295 4294967295"#]],
+ SUBTREE $$ 1 1
+ IDENT ident 1
+ PUNCH , [alone] 1
+ SUBTREE [] 1 1"#]],
);
}
@@ -62,8 +62,8 @@ fn test_fn_like_macro_clone_raw_ident() {
"fn_like_clone_tokens",
"r#async",
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- IDENT r#async 4294967295"#]],
+ SUBTREE $$ 1 1
+ IDENT r#async 1"#]],
);
}
@@ -73,14 +73,14 @@ fn test_fn_like_mk_literals() {
"fn_like_mk_literals",
r#""#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- LITERAL b"byte_string" 4294967295
- LITERAL 'c' 4294967295
- LITERAL "string" 4294967295
- LITERAL 3.14f64 4294967295
- LITERAL 3.14 4294967295
- LITERAL 123i64 4294967295
- LITERAL 123 4294967295"#]],
+ SUBTREE $$ 1 1
+ LITERAL b"byte_string" 1
+ LITERAL 'c' 1
+ LITERAL "string" 1
+ LITERAL 3.14f64 1
+ LITERAL 3.14 1
+ LITERAL 123i64 1
+ LITERAL 123 1"#]],
);
}
@@ -90,9 +90,9 @@ fn test_fn_like_mk_idents() {
"fn_like_mk_idents",
r#""#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- IDENT standard 4294967295
- IDENT r#raw 4294967295"#]],
+ SUBTREE $$ 1 1
+ IDENT standard 1
+ IDENT r#raw 1"#]],
);
}
@@ -102,17 +102,17 @@ fn test_fn_like_macro_clone_literals() {
"fn_like_clone_tokens",
r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
expect![[r#"
- SUBTREE $$ 4294967295 4294967295
- LITERAL 1u16 4294967295
- PUNCH , [alone] 4294967295
- LITERAL 2_u32 4294967295
- PUNCH , [alone] 4294967295
- PUNCH - [alone] 4294967295
- LITERAL 4i64 4294967295
- PUNCH , [alone] 4294967295
- LITERAL 3.14f32 4294967295
- PUNCH , [alone] 4294967295
- LITERAL "hello bridge" 4294967295"#]],
+ SUBTREE $$ 1 1
+ LITERAL 1u16 1
+ PUNCH , [alone] 1
+ LITERAL 2_u32 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL 4i64 1
+ PUNCH , [alone] 1
+ LITERAL 3.14f32 1
+ PUNCH , [alone] 1
+ LITERAL "hello bridge" 1"#]],
);
}
@@ -126,12 +126,12 @@ fn test_attr_macro() {
r#"mod m {}"#,
r#"some arguments"#,
expect![[r##"
- SUBTREE $$ 4294967295 4294967295
- IDENT compile_error 4294967295
- PUNCH ! [alone] 4294967295
- SUBTREE () 4294967295 4294967295
- LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295
- PUNCH ; [alone] 4294967295"##]],
+ SUBTREE $$ 1 1
+ IDENT compile_error 1
+ PUNCH ! [alone] 1
+ SUBTREE () 1 1
+ LITERAL "#[attr_error(some arguments)] mod m {}" 1
+ PUNCH ; [alone] 1"##]],
);
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
index 49b4d973b..c12096d14 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,18 +1,18 @@
//! utils used in proc-macro tests
use expect_test::Expect;
-use std::str::FromStr;
+use proc_macro_api::msg::TokenId;
use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
-fn parse_string(code: &str) -> Option<crate::server::TokenStream> {
+fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> {
// This is a bit strange. We need to parse a string into a token stream into
// order to create a tt::SubTree from it in fixtures. `into_subtree` is
// implemented by all the ABIs we have so we arbitrarily choose one ABI to
// write a `parse_string` function for and use that. The tests don't really
// care which ABI we're using as the `into_subtree` function isn't part of
// the ABI and shouldn't change between ABI versions.
- crate::server::TokenStream::from_str(code).ok()
+ crate::server::TokenStream::from_str(code, call_site).ok()
}
pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
@@ -24,12 +24,24 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, e
}
fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+ let def_site = TokenId(0);
+ let call_site = TokenId(1);
+ let mixed_site = TokenId(2);
let path = proc_macro_test_dylib_path();
let expander = dylib::Expander::new(&path).unwrap();
- let fixture = parse_string(input).unwrap();
- let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
-
- let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
+ let fixture = parse_string(input, call_site).unwrap();
+ let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
+
+ let res = expander
+ .expand(
+ macro_name,
+ &fixture.into_subtree(call_site),
+ attr.as_ref(),
+ def_site,
+ call_site,
+ mixed_site,
+ )
+ .unwrap();
expect.assert_eq(&format!("{res:?}"));
}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
index 77b4afd7d..12d7c07d3 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
doctest = false
[build-dependencies]
-cargo_metadata = "0.15.0"
+cargo_metadata.workspace = true
proc-macro-test-impl = { path = "imp", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
index feeacdb64..32510fba2 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/imp/src/lib.rs
@@ -1,6 +1,6 @@
//! Exports a few trivial procedural macros for testing.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
index 6d57bc81e..739c6ec6f 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/src/lib.rs
@@ -1,6 +1,6 @@
//! Exports a few trivial procedural macros for testing.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub static PROC_MACRO_TEST_LOCATION: &str =
include_str!(concat!(env!("OUT_DIR"), "/proc_macro_test_location.txt"));
diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml
index 937834a82..56ce9d11c 100644
--- a/src/tools/rust-analyzer/crates/profile/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml
@@ -14,8 +14,8 @@ doctest = false
[dependencies]
once_cell = "1.17.0"
cfg-if = "1.0.0"
-libc = "0.2.135"
la-arena.workspace = true
+libc.workspace = true
countme = { version = "3.0.1", features = ["enable"] }
jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
diff --git a/src/tools/rust-analyzer/crates/profile/src/lib.rs b/src/tools/rust-analyzer/crates/profile/src/lib.rs
index e7fc3d970..fdd724e2a 100644
--- a/src/tools/rust-analyzer/crates/profile/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/profile/src/lib.rs
@@ -1,6 +1,6 @@
//! A collection of tools for profiling rust-analyzer.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod stop_watch;
mod memory_usage;
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
index 75977fc5b..3e48de645 100644
--- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
@@ -12,16 +12,16 @@ rust-version.workspace = true
doctest = false
[dependencies]
-tracing = "0.1.35"
+anyhow.workspace = true
+cargo_metadata.workspace = true
rustc-hash = "1.1.0"
-cargo_metadata = "0.15.0"
semver = "1.0.14"
serde_json.workspace = true
serde.workspace = true
+tracing.workspace = true
triomphe.workspace = true
-anyhow = "1.0.62"
la-arena.workspace = true
-itertools = "0.10.5"
+itertools.workspace = true
# local deps
base-db.workspace = true
diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
index fb0f3ab7d..68cd40c04 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
@@ -73,6 +73,10 @@ impl WorkspaceBuildScripts {
cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]);
cmd.args(&config.extra_args);
+ if let Some(target_dir) = &config.target_dir {
+ cmd.arg("--target-dir").arg(target_dir);
+ }
+
// --all-targets includes tests, benches and examples in addition to the
// default lib and bins. This is an independent concept from the --target
// flag below.
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
index e47808a2c..ca3d6e059 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -96,6 +96,8 @@ pub struct CargoConfig {
pub extra_env: FxHashMap<String, String>,
pub invocation_strategy: InvocationStrategy,
pub invocation_location: InvocationLocation,
+ /// Optional path to use instead of `target` when building
+ pub target_dir: Option<PathBuf>,
}
pub type Package = Idx<PackageData>;
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
index 901dcfd2b..5f9b70828 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -15,7 +15,7 @@
//! procedural macros).
//! * Lowering of concrete model to a [`base_db::CrateGraph`]
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod manifest_path;
mod cargo_workspace;
diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
index 80897f747..931eba115 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
@@ -49,7 +49,7 @@
//! user explores them belongs to that extension (it's totally valid to change
//! rust-project.json over time via configuration request!)
-use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition};
+use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, DependencyKind, Edition};
use la_arena::RawIdx;
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
@@ -135,6 +135,7 @@ impl ProjectJson {
Dependency::new(
dep_data.name,
CrateId::from_raw(RawIdx::from(dep_data.krate as u32)),
+ DependencyKind::Normal,
)
})
.collect::<Vec<_>>(),
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
index 7815b9dda..4887b2981 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
@@ -58,7 +58,7 @@ fn load_cargo_with_sysroot(
&mut {
|path| {
let len = file_map.len();
- Some(*file_map.entry(path.to_path_buf()).or_insert(FileId(len as u32)))
+ Some(*file_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
}
},
&Default::default(),
@@ -142,7 +142,7 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> (CrateGraph, ProcMacro
let mut counter = 0;
move |_path| {
counter += 1;
- Some(FileId(counter))
+ Some(FileId::from_raw(counter))
}
},
&Default::default(),
@@ -249,3 +249,55 @@ fn crate_graph_dedup() {
crate_graph.extend(regex_crate_graph, &mut regex_proc_macros);
assert_eq!(crate_graph.iter().count(), 118);
}
+
+#[test]
+fn test_deduplicate_origin_dev() {
+ let path_map = &mut Default::default();
+ let (mut crate_graph, _proc_macros) =
+ load_cargo_with_sysroot(path_map, "deduplication_crate_graph_A.json");
+ crate_graph.sort_deps();
+ let (crate_graph_1, mut _proc_macros_2) =
+ load_cargo_with_sysroot(path_map, "deduplication_crate_graph_B.json");
+
+ crate_graph.extend(crate_graph_1, &mut _proc_macros_2);
+
+ let mut crates_named_p2 = vec![];
+ for id in crate_graph.iter() {
+ let krate = &crate_graph[id];
+ if let Some(name) = krate.display_name.as_ref() {
+ if name.to_string() == "p2" {
+ crates_named_p2.push(krate);
+ }
+ }
+ }
+
+ assert!(crates_named_p2.len() == 1);
+ let p2 = crates_named_p2[0];
+ assert!(p2.origin.is_local());
+}
+
+#[test]
+fn test_deduplicate_origin_dev_rev() {
+ let path_map = &mut Default::default();
+ let (mut crate_graph, _proc_macros) =
+ load_cargo_with_sysroot(path_map, "deduplication_crate_graph_B.json");
+ crate_graph.sort_deps();
+ let (crate_graph_1, mut _proc_macros_2) =
+ load_cargo_with_sysroot(path_map, "deduplication_crate_graph_A.json");
+
+ crate_graph.extend(crate_graph_1, &mut _proc_macros_2);
+
+ let mut crates_named_p2 = vec![];
+ for id in crate_graph.iter() {
+ let krate = &crate_graph[id];
+ if let Some(name) = krate.display_name.as_ref() {
+ if name.to_string() == "p2" {
+ crates_named_p2.push(krate);
+ }
+ }
+ }
+
+ assert!(crates_named_p2.len() == 1);
+ let p2 = crates_named_p2[0];
+ assert!(p2.origin.is_local());
+}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index e0209ca15..933357035 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -6,8 +6,8 @@ use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr,
use anyhow::{format_err, Context};
use base_db::{
- CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
- FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult,
+ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind,
+ Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult,
};
use cfg::{CfgDiff, CfgOptions};
use paths::{AbsPath, AbsPathBuf};
@@ -834,7 +834,7 @@ fn project_json_to_crate_graph(
for dep in &krate.deps {
if let Some(&to) = crates.get(&dep.crate_id) {
- add_dep(crate_graph, from, dep.name.clone(), to)
+ add_dep(crate_graph, from, dep.name.clone(), to, dep.kind().to_owned())
}
}
}
@@ -979,7 +979,7 @@ fn cargo_to_crate_graph(
// cargo metadata does not do any normalization,
// so we do it ourselves currently
let name = CrateName::normalize_dashes(&name);
- add_dep(crate_graph, from, name, to);
+ add_dep(crate_graph, from, name, to, DependencyKind::Normal);
}
}
}
@@ -999,7 +999,17 @@ fn cargo_to_crate_graph(
continue;
}
- add_dep(crate_graph, from, name.clone(), to)
+ add_dep(
+ crate_graph,
+ from,
+ name.clone(),
+ to,
+ match dep.kind {
+ DepKind::Normal => DependencyKind::Normal,
+ DepKind::Dev => DependencyKind::Dev,
+ DepKind::Build => DependencyKind::Build,
+ },
+ )
}
}
}
@@ -1187,7 +1197,17 @@ fn handle_rustc_crates(
let name = CrateName::new(&dep.name).unwrap();
if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() {
- add_dep(crate_graph, from, name.clone(), to);
+ add_dep(
+ crate_graph,
+ from,
+ name.clone(),
+ to,
+ match dep.kind {
+ DepKind::Normal => DependencyKind::Normal,
+ DepKind::Dev => DependencyKind::Dev,
+ DepKind::Build => DependencyKind::Build,
+ },
+ );
}
}
}
@@ -1209,7 +1229,7 @@ fn handle_rustc_crates(
// `rust_analyzer` thinks that it should use the one from the `rustc_source`
// instead of the one from `crates.io`
if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) {
- add_dep(crate_graph, *from, name.clone(), to);
+ add_dep(crate_graph, *from, name.clone(), to, DependencyKind::Normal);
}
}
}
@@ -1308,7 +1328,14 @@ impl SysrootPublicDeps {
/// Makes `from` depend on the public sysroot crates.
fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) {
for (name, krate, prelude) in &self.deps {
- add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude);
+ add_dep_with_prelude(
+ crate_graph,
+ from,
+ name.clone(),
+ *krate,
+ *prelude,
+ DependencyKind::Normal,
+ );
}
}
}
@@ -1363,7 +1390,7 @@ fn sysroot_to_crate_graph(
for &to in sysroot[from].deps.iter() {
let name = CrateName::new(&sysroot[to].name).unwrap();
if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) {
- add_dep(crate_graph, from, name, to);
+ add_dep(crate_graph, from, name, to, DependencyKind::Normal);
}
}
}
@@ -1442,8 +1469,14 @@ fn handle_hack_cargo_workspace(
.collect()
}
-fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) {
- add_dep_inner(graph, from, Dependency::new(name, to))
+fn add_dep(
+ graph: &mut CrateGraph,
+ from: CrateId,
+ name: CrateName,
+ to: CrateId,
+ kind: DependencyKind,
+) {
+ add_dep_inner(graph, from, Dependency::new(name, to, kind))
}
fn add_dep_with_prelude(
@@ -1452,12 +1485,20 @@ fn add_dep_with_prelude(
name: CrateName,
to: CrateId,
prelude: bool,
+ kind: DependencyKind,
) {
- add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude))
+ add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude, kind))
}
fn add_proc_macro_dep(crate_graph: &mut CrateGraph, from: CrateId, to: CrateId, prelude: bool) {
- add_dep_with_prelude(crate_graph, from, CrateName::new("proc_macro").unwrap(), to, prelude);
+ add_dep_with_prelude(
+ crate_graph,
+ from,
+ CrateName::new("proc_macro").unwrap(),
+ to,
+ prelude,
+ DependencyKind::Normal,
+ );
}
fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) {
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_A.json b/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_A.json
new file mode 100644
index 000000000..b0fb5845c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_A.json
@@ -0,0 +1,140 @@
+{
+ "packages": [
+ {
+ "name": "p1",
+ "version": "0.1.0",
+ "id": "p1 0.1.0 (path+file:///example_project/p1)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [
+ {
+ "name": "p2",
+ "source": null,
+ "req": "*",
+ "kind": null,
+ "rename": null,
+ "optional": false,
+ "uses_default_features": true,
+ "features": [],
+ "target": null,
+ "registry": null,
+ "path": "$ROOT$example_project/p2"
+ }
+ ],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "p1",
+ "src_path": "$ROOT$example_project/p1/src/lib.rs",
+ "edition": "2021",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "$ROOT$example_project/p1/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2021",
+ "links": null,
+ "default_run": null,
+ "rust_version": null
+ },
+ {
+ "name": "p2",
+ "version": "0.1.0",
+ "id": "p2 0.1.0 (path+file:///example_project/p2)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "p2",
+ "src_path": "$ROOT$example_project/p2/src/lib.rs",
+ "edition": "2021",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "$ROOT$example_project/p2/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2021",
+ "links": null,
+ "default_run": null,
+ "rust_version": null
+ }
+ ],
+ "workspace_members": [
+ "p1 0.1.0 (path+file:///example_project/p1)"
+ ],
+ "workspace_default_members": [
+ "p1 0.1.0 (path+file:///example_project/p1)"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "p1 0.1.0 (path+file:///example_project/p1)",
+ "dependencies": [
+ "p2 0.1.0 (path+file:///example_project/p2)"
+ ],
+ "deps": [
+ {
+ "name": "p2",
+ "pkg": "p2 0.1.0 (path+file:///example_project/p2)",
+ "dep_kinds": [
+ {
+ "kind": null,
+ "target": null
+ }
+ ]
+ }
+ ],
+ "features": []
+ },
+ {
+ "id": "p2 0.1.0 (path+file:///example_project/p2)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "p1 0.1.0 (path+file:///example_project/p1)"
+ },
+ "target_directory": "$ROOT$example_project/p1/target",
+ "version": 1,
+ "workspace_root": "$ROOT$example_project/p1",
+ "metadata": null
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_B.json b/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_B.json
new file mode 100644
index 000000000..b5d1e16e6
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/deduplication_crate_graph_B.json
@@ -0,0 +1,66 @@
+{
+ "packages": [
+ {
+ "name": "p2",
+ "version": "0.1.0",
+ "id": "p2 0.1.0 (path+file:///example_project/p2)",
+ "license": null,
+ "license_file": null,
+ "description": null,
+ "source": null,
+ "dependencies": [],
+ "targets": [
+ {
+ "kind": [
+ "lib"
+ ],
+ "crate_types": [
+ "lib"
+ ],
+ "name": "p2",
+ "src_path": "$ROOT$example_project/p2/src/lib.rs",
+ "edition": "2021",
+ "doc": true,
+ "doctest": true,
+ "test": true
+ }
+ ],
+ "features": {},
+ "manifest_path": "$ROOT$example_project/p2/Cargo.toml",
+ "metadata": null,
+ "publish": null,
+ "authors": [],
+ "categories": [],
+ "keywords": [],
+ "readme": null,
+ "repository": null,
+ "homepage": null,
+ "documentation": null,
+ "edition": "2021",
+ "links": null,
+ "default_run": null,
+ "rust_version": null
+ }
+ ],
+ "workspace_members": [
+ "p2 0.1.0 (path+file:///example_project/p2)"
+ ],
+ "workspace_default_members": [
+ "p2 0.1.0 (path+file:///example_project/p2)"
+ ],
+ "resolve": {
+ "nodes": [
+ {
+ "id": "p2 0.1.0 (path+file:///example_project/p2)",
+ "dependencies": [],
+ "deps": [],
+ "features": []
+ }
+ ],
+ "root": "p2 0.1.0 (path+file:///example_project/p2)"
+ },
+ "target_directory": "$ROOT$example_project/p2/target",
+ "version": 1,
+ "workspace_root": "$ROOT$example_project/p2",
+ "metadata": null
+} \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
index 727d39a30..e98f016ca 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
@@ -48,6 +48,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -112,6 +113,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -119,6 +121,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -183,6 +186,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -190,6 +194,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -254,6 +259,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -261,6 +267,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
index 727d39a30..e98f016ca 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
@@ -48,6 +48,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -112,6 +113,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -119,6 +121,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -183,6 +186,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -190,6 +194,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -254,6 +259,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -261,6 +267,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
index 89728babd..7ecd53572 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
@@ -47,6 +47,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -110,6 +111,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -117,6 +119,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -180,6 +183,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -187,6 +191,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
@@ -250,6 +255,7 @@
name: CrateName(
"hello_world",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -257,6 +263,7 @@
name: CrateName(
"libc",
),
+ kind: Normal,
prelude: true,
},
],
diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
index b7bf6cb27..581a6afc1 100644
--- a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
+++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
@@ -28,6 +28,7 @@
name: CrateName(
"core",
),
+ kind: Normal,
prelude: true,
},
],
@@ -168,6 +169,7 @@
name: CrateName(
"std",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -175,6 +177,7 @@
name: CrateName(
"core",
),
+ kind: Normal,
prelude: true,
},
],
@@ -249,6 +252,7 @@
name: CrateName(
"alloc",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -256,6 +260,7 @@
name: CrateName(
"panic_unwind",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -263,6 +268,7 @@
name: CrateName(
"panic_abort",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -270,6 +276,7 @@
name: CrateName(
"core",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -277,6 +284,7 @@
name: CrateName(
"profiler_builtins",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -284,6 +292,7 @@
name: CrateName(
"unwind",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -291,6 +300,7 @@
name: CrateName(
"std_detect",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -298,6 +308,7 @@
name: CrateName(
"test",
),
+ kind: Normal,
prelude: true,
},
],
@@ -438,6 +449,7 @@
name: CrateName(
"core",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -445,6 +457,7 @@
name: CrateName(
"alloc",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -452,6 +465,7 @@
name: CrateName(
"std",
),
+ kind: Normal,
prelude: true,
},
Dependency {
@@ -459,6 +473,7 @@
name: CrateName(
"test",
),
+ kind: Normal,
prelude: false,
},
Dependency {
@@ -466,6 +481,7 @@
name: CrateName(
"proc_macro",
),
+ kind: Normal,
prelude: false,
},
],
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
index 7410f0a3a..39ac338aa 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
@@ -19,44 +19,37 @@ name = "rust-analyzer"
path = "src/bin/main.rs"
[dependencies]
-anyhow = "1.0.62"
+anyhow.workspace = true
crossbeam-channel = "0.5.5"
-dissimilar = "1.0.4"
-itertools = "0.10.5"
-scip = "0.1.1"
+dissimilar.workspace = true
+itertools.workspace = true
+scip = "0.3.1"
lsp-types = { version = "=0.94.0", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.3.0"
oorandom = "11.1.3"
+rayon.workspace = true
rustc-hash = "1.1.0"
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
-rayon = "1.6.1"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }
lsp-server.workspace = true
-tracing = "0.1.35"
-tracing-subscriber = { version = "0.3.16", default-features = false, features = [
- "registry",
- "fmt",
- "tracing-log",
-] }
-tracing-log = "0.1.3"
-tracing-tree = "0.2.1"
+tracing.workspace = true
+tracing-subscriber.workspace = true
+tracing-log = "0.2.0"
+tracing-tree.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
always-assert = "0.1.2"
-
-# These 3 deps are not used by r-a directly, but we list them here to lock in their versions
-# in our transitive deps to prevent them from pulling in windows-sys 0.45.0
-mio = "=0.8.5"
-parking_lot_core = "=0.9.6"
+walkdir = "2.3.2"
cfg.workspace = true
flycheck.workspace = true
hir-def.workspace = true
hir-ty.workspace = true
hir.workspace = true
+rustc-dependencies.workspace = true
ide-db.workspace = true
# This should only be used in CLI
ide-ssr.workspace = true
@@ -67,6 +60,7 @@ profile.workspace = true
project-model.workspace = true
stdx.workspace = true
syntax.workspace = true
+parser.workspace = true
toolchain.workspace = true
vfs-notify.workspace = true
vfs.workspace = true
@@ -79,7 +73,7 @@ jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = tr
[dev-dependencies]
expect-test = "1.4.0"
-xshell = "0.2.2"
+xshell.workspace = true
test-utils.workspace = true
sourcegen.workspace = true
@@ -89,4 +83,13 @@ mbe.workspace = true
jemalloc = ["jemallocator", "profile/jemalloc"]
force-always-assert = ["always-assert/force"]
sysroot-abi = []
-in-rust-tree = ["sysroot-abi", "ide/in-rust-tree", "syntax/in-rust-tree"]
+in-rust-tree = [
+ "sysroot-abi",
+ "ide/in-rust-tree",
+ "syntax/in-rust-tree",
+ "parser/in-rust-tree",
+ "rustc-dependencies/in-rust-tree",
+ "hir/in-rust-tree",
+ "hir-def/in-rust-tree",
+ "hir-ty/in-rust-tree",
+]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
index 2fa14fc7e..8472e49de 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -2,7 +2,11 @@
//!
//! Based on cli flags, either spawns an LSP server, or runs a batch analysis
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+#[cfg(feature = "in-rust-tree")]
+#[allow(unused_extern_crates)]
+extern crate rustc_driver;
mod logger;
mod rustc_wrapper;
@@ -83,6 +87,7 @@ fn main() -> anyhow::Result<()> {
flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?,
}
Ok(())
}
@@ -190,6 +195,12 @@ fn run_server() -> anyhow::Result<()> {
}
};
+ let mut is_visual_studio_code = false;
+ if let Some(client_info) = client_info {
+ tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
+ is_visual_studio_code = client_info.name.starts_with("Visual Studio Code");
+ }
+
let workspace_roots = workspace_folders
.map(|workspaces| {
workspaces
@@ -201,7 +212,7 @@ fn run_server() -> anyhow::Result<()> {
})
.filter(|workspaces| !workspaces.is_empty())
.unwrap_or_else(|| vec![root_path.clone()]);
- let mut config = Config::new(root_path, capabilities, workspace_roots);
+ let mut config = Config::new(root_path, capabilities, workspace_roots, is_visual_studio_code);
if let Some(json) = initialization_options {
if let Err(e) = config.update(json) {
use lsp_types::{
@@ -231,10 +242,6 @@ fn run_server() -> anyhow::Result<()> {
connection.initialize_finish(initialize_id, initialize_result)?;
- if let Some(client_info) = client_info {
- tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
- }
-
if !config.has_linked_projects() && config.detached_files().is_empty() {
config.rediscover_workspaces();
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
index c7b84c41b..728bade0d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs
@@ -209,7 +209,7 @@ mod tests {
use super::*;
use cfg::CfgExpr;
- use mbe::syntax_node_to_token_tree;
+ use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{
ast::{self, AstNode},
SmolStr,
@@ -219,7 +219,7 @@ mod tests {
let cfg_expr = {
let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
- let (tt, _) = syntax_node_to_token_tree(tt.syntax());
+ let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap);
CfgExpr::parse(&tt)
};
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
index 64646b33a..de00c4192 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
@@ -10,6 +10,7 @@ mod ssr;
mod lsif;
mod scip;
mod run_tests;
+mod rustc_tests;
mod progress_report;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index dcb3ca658..1908c73b3 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -8,7 +8,7 @@ use std::{
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
- Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ModuleDef, Name,
+ Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ModuleDef, Name,
};
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
@@ -762,7 +762,8 @@ impl flags::AnalysisStats {
group: true,
skip_glob_imports: true,
},
- prefer_no_std: Default::default(),
+ prefer_no_std: false,
+ prefer_prelude: true,
},
ide::AssistResolveStrategy::All,
file_id,
@@ -782,6 +783,7 @@ impl flags::AnalysisStats {
closure_return_type_hints: ide::ClosureReturnTypeHints::Always,
closure_capture_hints: true,
binding_mode_hints: true,
+ implicit_drop_hints: true,
lifetime_elision_hints: ide::LifetimeElisionHints::Always,
param_names_for_lifetime_elision_hints: true,
hide_named_constructor_hints: false,
@@ -846,9 +848,7 @@ fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: Pa
Err(SyntheticSyntax) => return "synthetic,,".to_string(),
};
let root = db.parse_or_expand(src.file_id);
- let node = src.map(|e| {
- e.either(|it| it.to_node(&root).syntax().clone(), |it| it.to_node(&root).syntax().clone())
- });
+ let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
let line_index = db.line_index(original_range.file_id);
@@ -888,12 +888,7 @@ fn pat_syntax_range(
let src = sm.pat_syntax(pat_id);
if let Ok(src) = src {
let root = db.parse_or_expand(src.file_id);
- let node = src.map(|e| {
- e.either(
- |it| it.to_node(&root).syntax().clone(),
- |it| it.to_node(&root).syntax().clone(),
- )
- });
+ let node = src.map(|e| e.to_node(&root).syntax().clone());
let original_range = node.as_ref().original_file_range(db);
let path = vfs.file_path(original_range.file_id);
let line_index = db.line_index(original_range.file_id);
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index 8541be715..abec26794 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -4,7 +4,7 @@
use project_model::{CargoConfig, RustLibSource};
use rustc_hash::FxHashSet;
-use hir::{db::HirDatabase, Crate, Module};
+use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
use ide_db::base_db::SourceDatabaseExt;
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
index 419440b6d..5633c0c48 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
@@ -98,6 +98,15 @@ xflags::xflags! {
required path: PathBuf
}
+ /// Run unit tests of the project using mir interpreter
+ cmd rustc-tests {
+ /// Directory with Cargo.toml.
+ required rustc_repo: PathBuf
+
+ /// Only run tests with filter as substring
+ optional --filter path: String
+ }
+
cmd diagnostics {
/// Directory with Cargo.toml.
required path: PathBuf
@@ -131,6 +140,9 @@ xflags::xflags! {
/// The output path where the SCIP file will be written to. Defaults to `index.scip`.
optional --output path: PathBuf
+
+ /// A path to an json configuration file that can be used to customize cargo behavior.
+ optional --config-path config_path: PathBuf
}
}
}
@@ -156,6 +168,7 @@ pub enum RustAnalyzerCmd {
Highlight(Highlight),
AnalysisStats(AnalysisStats),
RunTests(RunTests),
+ RustcTests(RustcTests),
Diagnostics(Diagnostics),
Ssr(Ssr),
Search(Search),
@@ -209,6 +222,12 @@ pub struct RunTests {
}
#[derive(Debug)]
+pub struct RustcTests {
+ pub rustc_repo: PathBuf,
+ pub filter: Option<String>,
+}
+
+#[derive(Debug)]
pub struct Diagnostics {
pub path: PathBuf,
@@ -239,6 +258,7 @@ pub struct Scip {
pub path: PathBuf,
pub output: Option<PathBuf>,
+ pub config_path: Option<PathBuf>,
}
impl RustAnalyzer {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
new file mode 100644
index 000000000..c89b88ac0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -0,0 +1,236 @@
+//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
+
+use std::{
+ cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf,
+};
+
+use hir::Crate;
+use ide::{AnalysisHost, Change, DiagnosticCode, DiagnosticsConfig};
+use profile::StopWatch;
+use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};
+
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use triomphe::Arc;
+use vfs::{AbsPathBuf, FileId};
+use walkdir::WalkDir;
+
+use crate::cli::{flags, report_metric, Result};
+
+struct Tester {
+ host: AnalysisHost,
+ root_file: FileId,
+ pass_count: u64,
+ ignore_count: u64,
+ fail_count: u64,
+ stopwatch: StopWatch,
+}
+
+fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
+ thread_local! {
+ static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new());
+ }
+ LEAK_STORE.with_borrow_mut(|s| match s.get(code) {
+ Some(c) => *c,
+ None => {
+ let v = DiagnosticCode::RustcHardError(format!("E{code}").leak());
+ s.insert(code.to_owned(), v);
+ v
+ }
+ })
+}
+
+fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> {
+ let text = read_to_string(p).unwrap();
+ let mut result = HashMap::new();
+ {
+ let mut text = &*text;
+ while let Some(p) = text.find("error[E") {
+ text = &text[p + 7..];
+ let code = string_to_diagnostic_code_leaky(&text[..4]);
+ *result.entry(code).or_insert(0) += 1;
+ }
+ }
+ result
+}
+
+impl Tester {
+ fn new() -> Result<Self> {
+ let tmp_file = AbsPathBuf::assert("/tmp/ra-rustc-test.rs".into());
+ std::fs::write(&tmp_file, "")?;
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
+ let workspace = ProjectWorkspace::DetachedFiles {
+ files: vec![tmp_file.clone()],
+ sysroot: Ok(
+ Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env).unwrap()
+ ),
+ rustc_cfg: vec![],
+ };
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: false,
+ with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+ let db = host.raw_database();
+ let krates = Crate::all(db);
+ let root_crate = krates.iter().cloned().find(|krate| krate.origin(db).is_local()).unwrap();
+ let root_file = root_crate.root_file(db);
+ Ok(Self {
+ host,
+ root_file,
+ pass_count: 0,
+ ignore_count: 0,
+ fail_count: 0,
+ stopwatch: StopWatch::start(),
+ })
+ }
+
+ fn test(&mut self, p: PathBuf) {
+ if p.parent().unwrap().file_name().unwrap() == "auxiliary" {
+ // These are not tests
+ return;
+ }
+ if IGNORED_TESTS.iter().any(|ig| p.file_name().is_some_and(|x| x == *ig)) {
+ println!("{p:?} IGNORE");
+ self.ignore_count += 1;
+ return;
+ }
+ let stderr_path = p.with_extension("stderr");
+ let expected = if stderr_path.exists() {
+ detect_errors_from_rustc_stderr_file(stderr_path)
+ } else {
+ HashMap::new()
+ };
+ let text = read_to_string(&p).unwrap();
+ let mut change = Change::new();
+ // Ignore unstable tests, since they move too fast and we do not intend to support all of them.
+ let mut ignore_test = text.contains("#![feature");
+ // Ignore test with extern crates, as this infra don't support them yet.
+ ignore_test |= text.contains("// aux-build:") || text.contains("// aux-crate:");
+ // Ignore test with extern modules similarly.
+ ignore_test |= text.contains("mod ");
+ // These should work, but they don't, and I don't know why, so ignore them.
+ ignore_test |= text.contains("extern crate proc_macro");
+ let should_have_no_error = text.contains("// check-pass")
+ || text.contains("// build-pass")
+ || text.contains("// run-pass");
+ change.change_file(self.root_file, Some(Arc::from(text)));
+ self.host.apply_change(change);
+ let diagnostic_config = DiagnosticsConfig::test_sample();
+ let diags = self
+ .host
+ .analysis()
+ .diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file)
+ .unwrap();
+ let mut actual = HashMap::new();
+ for diag in diags {
+ if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) {
+ continue;
+ }
+ if !should_have_no_error && !SUPPORTED_DIAGNOSTICS.contains(&diag.code) {
+ continue;
+ }
+ *actual.entry(diag.code).or_insert(0) += 1;
+ }
+ // Ignore tests with diagnostics that we don't emit.
+ ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k));
+ if ignore_test {
+ println!("{p:?} IGNORE");
+ self.ignore_count += 1;
+ } else if actual == expected {
+ println!("{p:?} PASS");
+ self.pass_count += 1;
+ } else {
+ println!("{p:?} FAIL");
+ println!("actual (r-a) = {:?}", actual);
+ println!("expected (rustc) = {:?}", expected);
+ self.fail_count += 1;
+ }
+ }
+
+ fn report(&mut self) {
+ println!(
+ "Pass count = {}, Fail count = {}, Ignore count = {}",
+ self.pass_count, self.fail_count, self.ignore_count
+ );
+ println!("Testing time and memory = {}", self.stopwatch.elapsed());
+ report_metric("rustc failed tests", self.fail_count, "#");
+ report_metric("rustc testing time", self.stopwatch.elapsed().time.as_millis() as u64, "ms");
+ }
+}
+
+/// These tests break rust-analyzer (either by panicking or hanging) so we should ignore them.
+const IGNORED_TESTS: &[&str] = &[
+ "trait-with-missing-associated-type-restriction.rs", // #15646
+ "trait-with-missing-associated-type-restriction-fixable.rs", // #15646
+ "resolve-self-in-impl.rs",
+ "basic.rs", // ../rust/tests/ui/associated-type-bounds/return-type-notation/basic.rs
+ "issue-26056.rs",
+ "float-field.rs",
+ "invalid_operator_trait.rs",
+ "type-alias-impl-trait-assoc-dyn.rs",
+ "deeply-nested_closures.rs", // exponential time
+ "hang-on-deeply-nested-dyn.rs", // exponential time
+ "dyn-rpit-and-let.rs", // unexpected free variable with depth `^1.0` with outer binder ^0
+ "issue-16098.rs", // Huge recursion limit for macros?
+ "issue-83471.rs", // crates/hir-ty/src/builder.rs:78:9: assertion failed: self.remaining() > 0
+];
+
+const SUPPORTED_DIAGNOSTICS: &[DiagnosticCode] = &[
+ DiagnosticCode::RustcHardError("E0023"),
+ DiagnosticCode::RustcHardError("E0046"),
+ DiagnosticCode::RustcHardError("E0063"),
+ DiagnosticCode::RustcHardError("E0107"),
+ DiagnosticCode::RustcHardError("E0117"),
+ DiagnosticCode::RustcHardError("E0133"),
+ DiagnosticCode::RustcHardError("E0210"),
+ DiagnosticCode::RustcHardError("E0268"),
+ DiagnosticCode::RustcHardError("E0308"),
+ DiagnosticCode::RustcHardError("E0384"),
+ DiagnosticCode::RustcHardError("E0407"),
+ DiagnosticCode::RustcHardError("E0432"),
+ DiagnosticCode::RustcHardError("E0451"),
+ DiagnosticCode::RustcHardError("E0507"),
+ DiagnosticCode::RustcHardError("E0583"),
+ DiagnosticCode::RustcHardError("E0559"),
+ DiagnosticCode::RustcHardError("E0616"),
+ DiagnosticCode::RustcHardError("E0618"),
+ DiagnosticCode::RustcHardError("E0624"),
+ DiagnosticCode::RustcHardError("E0774"),
+ DiagnosticCode::RustcHardError("E0767"),
+ DiagnosticCode::RustcHardError("E0777"),
+];
+
+impl flags::RustcTests {
+ pub fn run(self) -> Result<()> {
+ let mut tester = Tester::new()?;
+ let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui"));
+ for i in walk_dir {
+ let i = i?;
+ let p = i.into_path();
+ if let Some(f) = &self.filter {
+ if !p.as_os_str().to_string_lossy().contains(f) {
+ continue;
+ }
+ }
+ if p.extension().map_or(true, |x| x != "rs") {
+ continue;
+ }
+ if let Err(e) = std::panic::catch_unwind({
+ let tester = AssertUnwindSafe(&mut tester);
+ let p = p.clone();
+ move || {
+ let tester = tester;
+ tester.0.test(p);
+ }
+ }) {
+ println!("panic detected at test {:?}", p);
+ std::panic::resume_unwind(e);
+ }
+ }
+ tester.report();
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index 8c056fff0..30e11402c 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -11,10 +11,8 @@ use ide::{
TokenStaticData,
};
use ide_db::LineIndexDatabase;
-use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use scip::types as scip_types;
-use std::env;
use crate::{
cli::flags,
@@ -25,8 +23,6 @@ impl flags::Scip {
pub fn run(self) -> anyhow::Result<()> {
eprintln!("Generating SCIP start...");
let now = Instant::now();
- let mut cargo_config = CargoConfig::default();
- cargo_config.sysroot = Some(RustLibSource::Discover);
let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
let load_cargo_config = LoadCargoConfig {
@@ -34,14 +30,27 @@ impl flags::Scip {
with_proc_macro_server: ProcMacroServerChoice::Sysroot,
prefill_caches: true,
};
- let path = vfs::AbsPathBuf::assert(env::current_dir()?.join(&self.path));
- let rootpath = path.normalize();
- let manifest = ProjectManifest::discover_single(&path)?;
+ let root = vfs::AbsPathBuf::assert(std::env::current_dir()?.join(&self.path)).normalize();
- let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+ let mut config = crate::config::Config::new(
+ root.clone(),
+ lsp_types::ClientCapabilities::default(),
+ /* workspace_roots = */ vec![],
+ /* is_visual_studio_code = */ false,
+ );
- let (host, vfs, _) =
- load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+ if let Some(p) = self.config_path {
+ let mut file = std::io::BufReader::new(std::fs::File::open(p)?);
+ let json = serde_json::from_reader(&mut file)?;
+ config.update(json)?;
+ }
+ let cargo_config = config.cargo();
+ let (host, vfs, _) = load_workspace_at(
+ root.as_path().as_ref(),
+ &cargo_config,
+ &load_cargo_config,
+ &no_progress,
+ )?;
let db = host.raw_database();
let analysis = host.analysis();
@@ -58,8 +67,7 @@ impl flags::Scip {
.into(),
project_root: format!(
"file://{}",
- path.normalize()
- .as_os_str()
+ root.as_os_str()
.to_str()
.ok_or(anyhow::format_err!("Unable to normalize project_root path"))?
),
@@ -80,7 +88,7 @@ impl flags::Scip {
new_symbol
};
- let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) {
+ let relative_path = match get_relative_filepath(&vfs, &root, file_id) {
Some(relative_path) => relative_path,
None => continue,
};
@@ -125,6 +133,10 @@ impl flags::Scip {
documentation: documentation.unwrap_or_default(),
relationships: Vec::new(),
special_fields: Default::default(),
+ kind: Default::default(),
+ display_name: String::new(),
+ signature_documentation: Default::default(),
+ enclosing_symbol: String::new(),
};
symbols.push(symbol_info)
@@ -139,6 +151,7 @@ impl flags::Scip {
syntax_kind: Default::default(),
diagnostics: Vec::new(),
special_fields: Default::default(),
+ enclosing_range: Vec::new(),
});
});
@@ -152,6 +165,7 @@ impl flags::Scip {
occurrences,
symbols,
special_fields: Default::default(),
+ text: String::new(),
});
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index ea3a21241..258f74106 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -91,6 +91,12 @@ config_data! {
/// and should therefore include `--message-format=json` or a similar
/// option.
///
+ /// If there are multiple linked projects/workspaces, this command is invoked for
+ /// each of them, with the working directory being the workspace root
+ /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten
+ /// by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and
+ /// `#rust-analyzer.cargo.buildScripts.invocationLocation#`.
+ ///
/// By default, a cargo invocation will be constructed for the configured
/// targets and features, with the following base command line:
///
@@ -182,9 +188,11 @@ config_data! {
/// Cargo, you might also want to change
/// `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
///
- /// If there are multiple linked projects, this command is invoked for
- /// each of them, with the working directory being the project root
- /// (i.e., the folder containing the `Cargo.toml`).
+ /// If there are multiple linked projects/workspaces, this command is invoked for
+ /// each of them, with the working directory being the workspace root
+ /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten
+ /// by changing `#rust-analyzer.cargo.check.invocationStrategy#` and
+ /// `#rust-analyzer.cargo.check.invocationLocation#`.
///
/// An example command would be:
///
@@ -209,6 +217,8 @@ config_data! {
completion_autoself_enable: bool = "true",
/// Whether to add parenthesis and argument snippets when completing function.
completion_callable_snippets: CallableCompletionDef = "\"fill_arguments\"",
+ /// Whether to show full function/method signatures in completion docs.
+ completion_fullFunctionSignatures_enable: bool = "false",
/// Maximum number of completions to return. If `None`, the limit is infinite.
completion_limit: Option<usize> = "null",
/// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
@@ -342,7 +352,9 @@ config_data! {
/// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
imports_merge_glob: bool = "true",
/// Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
- imports_prefer_no_std: bool = "false",
+ imports_preferNoStd | imports_prefer_no_std: bool = "false",
+ /// Whether to prefer import paths containing a `prelude` module.
+ imports_preferPrelude: bool = "false",
/// The path structure for newly inserted paths to use.
imports_prefix: ImportPrefixDef = "\"plain\"",
@@ -369,6 +381,8 @@ config_data! {
inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false",
/// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"",
+ /// Whether to show implicit drop hints.
+ inlayHints_implicitDrops_enable: bool = "false",
/// Whether to show inlay type hints for elided lifetimes in function signatures.
inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
/// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
@@ -470,6 +484,14 @@ config_data! {
/// tests or binaries. For example, it may be `--release`.
runnables_extraArgs: Vec<String> = "[]",
+ /// Optional path to a rust-analyzer specific target directory.
+ /// This prevents rust-analyzer's `cargo check` from locking the `Cargo.lock`
+ /// at the expense of duplicating build artifacts.
+ ///
+ /// Set to `true` to use a subdirectory of the existing target directory or
+ /// set to a path relative to the workspace to use that path.
+ rust_analyzerTargetDir: Option<TargetDirectory> = "null",
+
/// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
/// projects, or "discover" to try to automatically find it if the `rustc-dev` component
/// is installed.
@@ -565,6 +587,7 @@ pub struct Config {
data: ConfigData,
detached_files: Vec<AbsPathBuf>,
snippets: Vec<Snippet>,
+ is_visual_studio_code: bool,
}
type ParallelCachePrimingNumThreads = u8;
@@ -755,11 +778,14 @@ impl fmt::Display for ConfigError {
}
}
+impl std::error::Error for ConfigError {}
+
impl Config {
pub fn new(
root_path: AbsPathBuf,
caps: ClientCapabilities,
workspace_roots: Vec<AbsPathBuf>,
+ is_visual_studio_code: bool,
) -> Self {
Config {
caps,
@@ -769,6 +795,7 @@ impl Config {
root_path,
snippets: Default::default(),
workspace_roots,
+ is_visual_studio_code,
}
}
@@ -1094,7 +1121,8 @@ impl Config {
ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
},
insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_prefer_no_std,
+ prefer_no_std: self.data.imports_preferNoStd,
+ prefer_prelude: self.data.imports_preferPrelude,
}
}
@@ -1248,6 +1276,7 @@ impl Config {
run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
extra_args: self.data.cargo_extraArgs.clone(),
extra_env: self.data.cargo_extraEnv.clone(),
+ target_dir: self.target_dir_from_config(),
}
}
@@ -1320,10 +1349,22 @@ impl Config {
extra_args: self.check_extra_args(),
extra_env: self.check_extra_env(),
ansi_color_output: self.color_diagnostic_output(),
+ target_dir: self.target_dir_from_config(),
},
}
}
+ // FIXME: This should be an AbsolutePathBuf
+ fn target_dir_from_config(&self) -> Option<PathBuf> {
+ self.data.rust_analyzerTargetDir.as_ref().and_then(|target_dir| match target_dir {
+ TargetDirectory::UseSubdirectory(yes) if *yes => {
+ Some(PathBuf::from("target/rust-analyzer"))
+ }
+ TargetDirectory::UseSubdirectory(_) => None,
+ TargetDirectory::Directory(dir) => Some(dir.clone()),
+ })
+ }
+
pub fn check_on_save(&self) -> bool {
self.data.checkOnSave
}
@@ -1353,6 +1394,7 @@ impl Config {
type_hints: self.data.inlayHints_typeHints_enable,
parameter_hints: self.data.inlayHints_parameterHints_enable,
chaining_hints: self.data.inlayHints_chainingHints_enable,
+ implicit_drop_hints: self.data.inlayHints_implicitDrops_enable,
discriminant_hints: match self.data.inlayHints_discriminantHints_enable {
DiscriminantHintsDef::Always => ide::DiscriminantHints::Always,
DiscriminantHintsDef::Never => ide::DiscriminantHints::Never,
@@ -1444,13 +1486,15 @@ impl Config {
&& completion_item_edit_resolve(&self.caps),
enable_self_on_the_fly: self.data.completion_autoself_enable,
enable_private_editable: self.data.completion_privateEditable_enable,
+ full_function_signatures: self.data.completion_fullFunctionSignatures_enable,
callable: match self.data.completion_callable_snippets {
CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
CallableCompletionDef::None => None,
},
insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_prefer_no_std,
+ prefer_no_std: self.data.imports_preferNoStd,
+ prefer_prelude: self.data.imports_preferPrelude,
snippet_cap: SnippetCap::new(try_or_def!(
self.caps
.text_document
@@ -1479,7 +1523,8 @@ impl Config {
snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
allowed: None,
insert_use: self.insert_use_config(),
- prefer_no_std: self.data.imports_prefer_no_std,
+ prefer_no_std: self.data.imports_preferNoStd,
+ prefer_prelude: self.data.imports_preferPrelude,
assist_emit_must_use: self.data.assist_emitMustUse,
}
}
@@ -1667,6 +1712,12 @@ impl Config {
pub fn typing_autoclose_angle(&self) -> bool {
self.data.typing_autoClosingAngleBrackets_enable
}
+
+ // FIXME: VSCode seems to work wrong sometimes, see https://github.com/microsoft/vscode/issues/193124
+ // hence, distinguish it for now.
+ pub fn is_visual_studio_code(&self) -> bool {
+ self.is_visual_studio_code
+ }
}
// Deserialization definitions
@@ -2015,6 +2066,14 @@ pub enum MemoryLayoutHoverRenderKindDef {
Both,
}
+#[derive(Deserialize, Debug, Clone, PartialEq)]
+#[serde(rename_all = "snake_case")]
+#[serde(untagged)]
+pub enum TargetDirectory {
+ UseSubdirectory(bool),
+ Directory(PathBuf),
+}
+
macro_rules! _config_data {
(struct $name:ident {
$(
@@ -2443,6 +2502,19 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
},
],
},
+ "Option<TargetDirectory>" => set! {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "string"
+ },
+ ],
+ },
_ => panic!("missing entry for {ty}: {default}"),
}
@@ -2555,8 +2627,12 @@ mod tests {
#[test]
fn proc_macro_srv_null() {
- let mut config =
- Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
config
.update(serde_json::json!({
"procMacro_server": null,
@@ -2567,8 +2643,12 @@ mod tests {
#[test]
fn proc_macro_srv_abs() {
- let mut config =
- Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
config
.update(serde_json::json!({
"procMacro": {"server": project_root().display().to_string()}
@@ -2579,8 +2659,12 @@ mod tests {
#[test]
fn proc_macro_srv_rel() {
- let mut config =
- Config::new(AbsPathBuf::try_from(project_root()).unwrap(), Default::default(), vec![]);
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
config
.update(serde_json::json!({
"procMacro": {"server": "./server"}
@@ -2591,4 +2675,67 @@ mod tests {
Some(AbsPathBuf::try_from(project_root().join("./server")).unwrap())
);
}
+
+ #[test]
+ fn cargo_target_dir_unset() {
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
+ config
+ .update(serde_json::json!({
+ "rust": { "analyzerTargetDir": null }
+ }))
+ .unwrap();
+ assert_eq!(config.data.rust_analyzerTargetDir, None);
+ assert!(
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == None)
+ );
+ }
+
+ #[test]
+ fn cargo_target_dir_subdir() {
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
+ config
+ .update(serde_json::json!({
+ "rust": { "analyzerTargetDir": true }
+ }))
+ .unwrap();
+ assert_eq!(
+ config.data.rust_analyzerTargetDir,
+ Some(TargetDirectory::UseSubdirectory(true))
+ );
+ assert!(
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(PathBuf::from("target/rust-analyzer")))
+ );
+ }
+
+ #[test]
+ fn cargo_target_dir_relative_dir() {
+ let mut config = Config::new(
+ AbsPathBuf::try_from(project_root()).unwrap(),
+ Default::default(),
+ vec![],
+ false,
+ );
+ config
+ .update(serde_json::json!({
+ "rust": { "analyzerTargetDir": "other_folder" }
+ }))
+ .unwrap();
+ assert_eq!(
+ config.data.rust_analyzerTargetDir,
+ Some(TargetDirectory::Directory(PathBuf::from("other_folder")))
+ );
+ assert!(
+ matches!(config.flycheck(), FlycheckConfig::CargoCommand { target_dir, .. } if target_dir == Some(PathBuf::from("other_folder")))
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
index 71701ef16..f80beb9ca 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
@@ -5,6 +5,7 @@ use std::mem;
use ide::FileId;
use ide_db::FxHashMap;
+use itertools::Itertools;
use nohash_hasher::{IntMap, IntSet};
use rustc_hash::FxHashSet;
use triomphe::Arc;
@@ -129,8 +130,28 @@ pub(crate) fn fetch_native_diagnostics(
) -> Vec<(FileId, Vec<lsp_types::Diagnostic>)> {
let _p = profile::span("fetch_native_diagnostics");
let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned());
- subscriptions
- .into_iter()
+
+ let convert_diagnostic =
+ |line_index: &crate::line_index::LineIndex, d: ide::Diagnostic| lsp_types::Diagnostic {
+ range: lsp::to_proto::range(&line_index, d.range.range),
+ severity: Some(lsp::to_proto::diagnostic_severity(d.severity)),
+ code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())),
+ code_description: Some(lsp_types::CodeDescription {
+ href: lsp_types::Url::parse(&d.code.url()).unwrap(),
+ }),
+ source: Some("rust-analyzer".to_string()),
+ message: d.message,
+ related_information: None,
+ tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]),
+ data: None,
+ };
+
+ // the diagnostics produced may point to different files not requested by the concrete request,
+ // put those into here and filter later
+ let mut odd_ones = Vec::new();
+ let mut diagnostics = subscriptions
+ .iter()
+ .copied()
.filter_map(|file_id| {
let line_index = snapshot.file_line_index(file_id).ok()?;
let diagnostics = snapshot
@@ -142,21 +163,39 @@ pub(crate) fn fetch_native_diagnostics(
)
.ok()?
.into_iter()
- .map(move |d| lsp_types::Diagnostic {
- range: lsp::to_proto::range(&line_index, d.range),
- severity: Some(lsp::to_proto::diagnostic_severity(d.severity)),
- code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())),
- code_description: Some(lsp_types::CodeDescription {
- href: lsp_types::Url::parse(&d.code.url()).unwrap(),
- }),
- source: Some("rust-analyzer".to_string()),
- message: d.message,
- related_information: None,
- tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]),
- data: None,
+ .filter_map(|d| {
+ if d.range.file_id == file_id {
+ Some(convert_diagnostic(&line_index, d))
+ } else {
+ odd_ones.push(d);
+ None
+ }
})
.collect::<Vec<_>>();
Some((file_id, diagnostics))
})
- .collect()
+ .collect::<Vec<_>>();
+
+ // Add back any diagnostics that point to files we are subscribed to
+ for (file_id, group) in odd_ones
+ .into_iter()
+ .sorted_by_key(|it| it.range.file_id)
+ .group_by(|it| it.range.file_id)
+ .into_iter()
+ {
+ if !subscriptions.contains(&file_id) {
+ continue;
+ }
+ let Some((_, diagnostics)) = diagnostics.iter_mut().find(|&&mut (id, _)| id == file_id)
+ else {
+ continue;
+ };
+ let Some(line_index) = snapshot.file_line_index(file_id).ok() else {
+ break;
+ };
+ for diagnostic in group {
+ diagnostics.push(convert_diagnostic(&line_index, diagnostic));
+ }
+ }
+ diagnostics
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
index 731580557..f8bc66ff8 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -538,7 +538,12 @@ mod tests {
let (sender, _) = crossbeam_channel::unbounded();
let state = GlobalState::new(
sender,
- Config::new(workspace_root.to_path_buf(), ClientCapabilities::default(), Vec::new()),
+ Config::new(
+ workspace_root.to_path_buf(),
+ ClientCapabilities::default(),
+ Vec::new(),
+ false,
+ ),
);
let snap = state.snapshot();
let mut actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap);
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index c09f57252..0f31fe160 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -187,11 +187,9 @@ impl GlobalState {
config_errors: Default::default(),
proc_macro_changed: false,
- // FIXME: use `Arc::from_iter` when it becomes available
- proc_macro_clients: Arc::from(Vec::new()),
+ proc_macro_clients: Arc::from_iter([]),
- // FIXME: use `Arc::from_iter` when it becomes available
- flycheck: Arc::from(Vec::new()),
+ flycheck: Arc::from_iter([]),
flycheck_sender,
flycheck_receiver,
last_flycheck_error: None,
@@ -202,7 +200,7 @@ impl GlobalState {
vfs_progress_n_total: 0,
vfs_progress_n_done: 0,
- workspaces: Arc::new(Vec::new()),
+ workspaces: Arc::from(Vec::new()),
crate_graph_file_dependencies: FxHashSet::default(),
fetch_workspaces_queue: OpQueue::default(),
fetch_build_data_queue: OpQueue::default(),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
index b8a1a39be..d8a590c80 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
@@ -4,6 +4,7 @@
use std::{
fs,
io::Write as _,
+ path::PathBuf,
process::{self, Stdio},
};
@@ -11,8 +12,8 @@ use anyhow::Context;
use ide::{
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
- HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory,
- Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
+ HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit,
+ ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
};
use ide_db::SymbolKind;
use lsp_server::ErrorCode;
@@ -50,8 +51,7 @@ use crate::{
};
pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
- // FIXME: use `Arc::from_iter` when it becomes available
- state.proc_macro_clients = Arc::from(Vec::new());
+ state.proc_macro_clients = Arc::from_iter([]);
state.proc_macro_changed = false;
state.fetch_workspaces_queue.request_op("reload workspace request".to_string(), false);
@@ -59,8 +59,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow:
}
pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
- // FIXME: use `Arc::from_iter` when it becomes available
- state.proc_macro_clients = Arc::from(Vec::new());
+ state.proc_macro_clients = Arc::from_iter([]);
state.proc_macro_changed = false;
state.fetch_build_data_queue.request_op("rebuild proc macros request".to_string(), ());
@@ -1410,7 +1409,7 @@ pub(crate) fn handle_inlay_hints(
let inlay_hints_config = snap.config.inlay_hints();
Ok(Some(
snap.analysis
- .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+ .inlay_hints(&inlay_hints_config, file_id, Some(RangeLimit::Fixed(range)))?
.into_iter()
.map(|it| {
to_proto::inlay_hint(
@@ -1437,26 +1436,17 @@ pub(crate) fn handle_inlay_hints_resolve(
};
let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
- let file_id = FileId(resolve_data.file_id);
+ let file_id = FileId::from_raw(resolve_data.file_id);
anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data");
let line_index = snap.file_line_index(file_id)?;
- let range = from_proto::text_range(
- &line_index,
- lsp_types::Range { start: original_hint.position, end: original_hint.position },
- )?;
- let range_start = range.start();
- let range_end = range.end();
- let large_range = TextRange::new(
- range_start.checked_sub(1.into()).unwrap_or(range_start),
- range_end.checked_add(1.into()).unwrap_or(range_end),
- );
+ let hint_position = from_proto::offset(&line_index, original_hint.position)?;
let mut forced_resolve_inlay_hints_config = snap.config.inlay_hints();
forced_resolve_inlay_hints_config.fields_to_resolve = InlayFieldsToResolve::empty();
let resolve_hints = snap.analysis.inlay_hints(
&forced_resolve_inlay_hints_config,
file_id,
- Some(large_range),
+ Some(RangeLimit::NearestParent(hint_position)),
)?;
let mut resolved_hints = resolve_hints
@@ -1995,7 +1985,25 @@ fn run_rustfmt(
cmd
}
RustfmtConfig::CustomCommand { command, args } => {
- let mut cmd = process::Command::new(command);
+ let cmd = PathBuf::from(&command);
+ let workspace = CargoTargetSpec::for_file(&snap, file_id)?;
+ let mut cmd = match workspace {
+ Some(spec) => {
+ // approach: if the command name contains a path separator, join it with the workspace root.
+ // however, if the path is absolute, joining will result in the absolute path being preserved.
+ // as a fallback, rely on $PATH-based discovery.
+ let cmd_path =
+ if cfg!(windows) && command.contains(&[std::path::MAIN_SEPARATOR, '/']) {
+ spec.workspace_root.join(cmd).into()
+ } else if command.contains(std::path::MAIN_SEPARATOR) {
+ spec.workspace_root.join(cmd).into()
+ } else {
+ cmd
+ };
+ process::Command::new(cmd_path)
+ }
+ None => process::Command::new(cmd),
+ };
cmd.envs(snap.config.extra_env());
cmd.args(args);
@@ -2003,6 +2011,8 @@ fn run_rustfmt(
}
};
+ tracing::debug!(?command, "created format command");
+
// try to chdir to the file so we can respect `rustfmt.toml`
// FIXME: use `rustfmt --config-path` once
// https://github.com/rust-lang/rustfmt/issues/4660 gets fixed
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
index 5a11012b9..41ff17f5e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -30,9 +30,12 @@ fn integrated_highlighting_benchmark() {
// Load rust-analyzer itself.
let workspace_to_load = project_root();
- let file = "./crates/ide-db/src/apply_change.rs";
+ let file = "./crates/rust-analyzer/src/config.rs";
- let cargo_config = CargoConfig::default();
+ let cargo_config = CargoConfig {
+ sysroot: Some(project_model::RustLibSource::Discover),
+ ..CargoConfig::default()
+ };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::None,
@@ -57,7 +60,6 @@ fn integrated_highlighting_benchmark() {
}
profile::init_from("*>100");
- // let _s = profile::heartbeat_span();
{
let _it = stdx::timeit("change");
@@ -86,7 +88,10 @@ fn integrated_completion_benchmark() {
let workspace_to_load = project_root();
let file = "./crates/hir/src/lib.rs";
- let cargo_config = CargoConfig::default();
+ let cargo_config = CargoConfig {
+ sysroot: Some(project_model::RustLibSource::Discover),
+ ..CargoConfig::default()
+ };
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro_server: ProcMacroServerChoice::None,
@@ -104,10 +109,46 @@ fn integrated_completion_benchmark() {
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
};
+ // kick off parsing and index population
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ + "sel".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::from(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
{
- let _it = stdx::timeit("initial");
+ let _span = profile::cpu_span();
let analysis = host.analysis();
- analysis.highlight_as_html(file_id, false).unwrap();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ full_function_signatures: false,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ prefer_no_std: false,
+ prefer_prelude: true,
+ limit: None,
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
}
profile::init_from("*>5");
@@ -117,8 +158,8 @@ fn integrated_completion_benchmark() {
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
let completion_offset =
- patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
- + "sel".len();
+ patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
+ + ";sel".len();
let mut change = Change::new();
change.change_file(file_id, Some(Arc::from(text)));
host.apply_change(change);
@@ -134,6 +175,7 @@ fn integrated_completion_benchmark() {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: true,
+ full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
insert_use: InsertUseConfig {
@@ -145,6 +187,7 @@ fn integrated_completion_benchmark() {
},
snippets: Vec::new(),
prefer_no_std: false,
+ prefer_prelude: true,
limit: None,
};
let position =
@@ -173,6 +216,7 @@ fn integrated_completion_benchmark() {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: true,
+ full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
insert_use: InsertUseConfig {
@@ -184,6 +228,7 @@ fn integrated_completion_benchmark() {
},
snippets: Vec::new(),
prefer_no_std: false,
+ prefer_prelude: true,
limit: None,
};
let position =
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
index 6c62577f6..29bc0b80d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
@@ -9,7 +9,7 @@
//! The `cli` submodule implements some batch-processing analysis, primarily as
//! a debugging aid.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
pub mod cli;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index 23074493a..dae560c5d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -1,7 +1,7 @@
//! Conversion of rust-analyzer specific types to lsp_types equivalents.
use std::{
iter::once,
- path,
+ mem, path,
sync::atomic::{AtomicU32, Ordering},
};
@@ -301,9 +301,11 @@ fn completion_item(
if config.completion_label_details_support() {
lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
- detail: None,
+ detail: item.label_detail.as_ref().map(ToString::to_string),
description: lsp_item.detail.clone(),
});
+ } else if let Some(label_detail) = item.label_detail {
+ lsp_item.label.push_str(label_detail.as_str());
}
set_score(&mut lsp_item, max_relevance, item.relevance);
@@ -443,17 +445,19 @@ pub(crate) fn inlay_hint(
file_id: FileId,
inlay_hint: InlayHint,
) -> Cancellable<lsp_types::InlayHint> {
+ let is_visual_studio_code = snap.config.is_visual_studio_code();
let needs_resolve = inlay_hint.needs_resolve;
let (label, tooltip, mut something_to_resolve) =
inlay_hint_label(snap, fields_to_resolve, needs_resolve, inlay_hint.label)?;
- let text_edits = if needs_resolve && fields_to_resolve.resolve_text_edits {
- something_to_resolve |= inlay_hint.text_edit.is_some();
- None
- } else {
- inlay_hint.text_edit.map(|it| text_edit_vec(line_index, it))
- };
+ let text_edits =
+ if !is_visual_studio_code && needs_resolve && fields_to_resolve.resolve_text_edits {
+ something_to_resolve |= inlay_hint.text_edit.is_some();
+ None
+ } else {
+ inlay_hint.text_edit.map(|it| text_edit_vec(line_index, it))
+ };
let data = if needs_resolve && something_to_resolve {
- Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.0 }).unwrap())
+ Some(to_value(lsp_ext::InlayHintResolveData { file_id: file_id.index() }).unwrap())
} else {
None
};
@@ -1121,13 +1125,20 @@ pub(crate) fn snippet_text_document_ops(
pub(crate) fn snippet_workspace_edit(
snap: &GlobalStateSnapshot,
- source_change: SourceChange,
+ mut source_change: SourceChange,
) -> Cancellable<lsp_ext::SnippetWorkspaceEdit> {
let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
- for op in source_change.file_system_edits {
- let ops = snippet_text_document_ops(snap, op)?;
- document_changes.extend_from_slice(&ops);
+ for op in &mut source_change.file_system_edits {
+ if let FileSystemEdit::CreateFile { dst, initial_contents } = op {
+ // replace with a placeholder to avoid cloneing the edit
+ let op = FileSystemEdit::CreateFile {
+ dst: dst.clone(),
+ initial_contents: mem::take(initial_contents),
+ };
+ let ops = snippet_text_document_ops(snap, op)?;
+ document_changes.extend_from_slice(&ops);
+ }
}
for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
let edit = snippet_text_document_edit(
@@ -1139,6 +1150,12 @@ pub(crate) fn snippet_workspace_edit(
)?;
document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
}
+ for op in source_change.file_system_edits {
+ if !matches!(op, FileSystemEdit::CreateFile { .. }) {
+ let ops = snippet_text_document_ops(snap, op)?;
+ document_changes.extend_from_slice(&ops);
+ }
+ }
let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
changes: None,
document_changes: Some(document_changes),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index 3fae08b82..7ab528f49 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -22,6 +22,7 @@ use ide_db::{
base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros},
FxHashMap,
};
+use itertools::Itertools;
use load_cargo::{load_proc_macro, ProjectFolders};
use proc_macro_api::ProcMacroServer;
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
@@ -227,16 +228,12 @@ impl GlobalState {
let mut i = 0;
while i < workspaces.len() {
if let Ok(w) = &workspaces[i] {
- let dupes: Vec<_> = workspaces
+ let dupes: Vec<_> = workspaces[i + 1..]
.iter()
- .enumerate()
- .skip(i + 1)
- .filter_map(|(i, it)| {
- it.as_ref().ok().filter(|ws| ws.eq_ignore_build_data(w)).map(|_| i)
- })
+ .positions(|it| it.as_ref().is_ok_and(|ws| ws.eq_ignore_build_data(w)))
.collect();
dupes.into_iter().rev().for_each(|d| {
- _ = workspaces.remove(d);
+ _ = workspaces.remove(d + i + 1);
});
}
i += 1;
@@ -380,7 +377,6 @@ impl GlobalState {
ws
})
.collect::<Vec<_>>();
-
// Workspaces are the same, but we've updated build data.
self.workspaces = Arc::new(workspaces);
} else {
@@ -441,28 +437,22 @@ impl GlobalState {
if self.config.expand_proc_macros() {
tracing::info!("Spawning proc-macro servers");
- // FIXME: use `Arc::from_iter` when it becomes available
- self.proc_macro_clients = Arc::from(
- self.workspaces
- .iter()
- .map(|ws| {
- let path = match self.config.proc_macro_srv() {
- Some(path) => path,
- None => ws.find_sysroot_proc_macro_srv()?,
- };
-
- tracing::info!("Using proc-macro server at {path}");
- ProcMacroServer::spawn(path.clone()).map_err(|err| {
- tracing::error!(
- "Failed to run proc-macro server from path {path}, error: {err:?}",
- );
- anyhow::format_err!(
- "Failed to run proc-macro server from path {path}, error: {err:?}",
- )
- })
- })
- .collect::<Vec<_>>(),
- )
+ self.proc_macro_clients = Arc::from_iter(self.workspaces.iter().map(|ws| {
+ let path = match self.config.proc_macro_srv() {
+ Some(path) => path,
+ None => ws.find_sysroot_proc_macro_srv()?,
+ };
+
+ tracing::info!("Using proc-macro server at {path}");
+ ProcMacroServer::spawn(path.clone()).map_err(|err| {
+ tracing::error!(
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
+ );
+ anyhow::format_err!(
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
+ )
+ })
+ }))
};
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
index d59914298..ec8e5c6dd 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -8,7 +8,7 @@
//! specific JSON shapes here -- there's little value in such tests, as we can't
//! be sure without a real client anyway.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#[cfg(not(feature = "in-rust-tree"))]
mod sourcegen;
@@ -984,6 +984,11 @@ fn main() {}
//- /src/old_file.rs
//- /src/old_folder/mod.rs
+mod nested;
+
+//- /src/old_folder/nested.rs
+struct foo;
+use crate::old_folder::nested::foo as bar;
//- /src/from_mod/mod.rs
@@ -1080,6 +1085,27 @@ fn main() {}
"newText": "new_folder"
}
]
+ },
+ {
+ "textDocument": {
+ "uri": format!("file://{}", tmp_dir_path.join("src").join("old_folder").join("nested.rs").to_str().unwrap().to_string().replace("C:\\", "/c:/").replace('\\', "/")),
+ "version": null
+ },
+ "edits": [
+ {
+ "range": {
+ "start": {
+ "line": 1,
+ "character": 11
+ },
+ "end": {
+ "line": 1,
+ "character": 21
+ }
+ },
+ "newText": "new_folder"
+ }
+ ]
}
]
}),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
index e49b5768f..106b99cb9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -150,6 +150,7 @@ impl Project<'_> {
..Default::default()
},
roots,
+ false,
);
config.update(self.config).expect("invalid config");
config.rediscover_workspaces();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
index 8b5c92c66..dba336ea7 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -157,7 +157,6 @@ Apache-2.0 OR MIT
Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
-BlueOak-1.0.0 OR MIT OR Apache-2.0
CC0-1.0
ISC
MIT
@@ -251,6 +250,7 @@ fn check_dbg(path: &Path, text: &str) {
// We have .dbg postfix
"ide-completion/src/completions/postfix.rs",
"ide-completion/src/completions/keyword.rs",
+ "ide-completion/src/tests/expression.rs",
"ide-completion/src/tests/proc_macros.rs",
// The documentation in string literals may contain anything for its own purposes
"ide-completion/src/lib.rs",
@@ -300,6 +300,8 @@ fn check_test_attrs(path: &Path, text: &str) {
// This file.
"slow-tests/tidy.rs",
"test-utils/src/fixture.rs",
+ // Generated code from lints contains doc tests in string literals.
+ "ide-db/src/generated/lints.rs",
];
if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
panic!(
@@ -315,7 +317,7 @@ fn check_trailing_ws(path: &Path, text: &str) {
return;
}
for (line_number, line) in text.lines().enumerate() {
- if line.chars().last().map(char::is_whitespace) == Some(true) {
+ if line.chars().last().is_some_and(char::is_whitespace) {
panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
}
}
diff --git a/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml b/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml
new file mode 100644
index 000000000..1b3b6ec73
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rustc-dependencies/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "rustc-dependencies"
+version = "0.0.0"
+description = "TBD"
+
+rust-version.workspace = true
+edition.workspace = true
+license.workspace = true
+authors.workspace = true
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+ra-ap-rustc_lexer = { version = "0.21.0" }
+ra-ap-rustc_parse_format = { version = "0.21.0", default-features = false }
+ra-ap-rustc_index = { version = "0.21.0", default-features = false }
+ra-ap-rustc_abi = { version = "0.21.0", default-features = false }
+
+[features]
+in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/rustc-dependencies/src/lib.rs b/src/tools/rust-analyzer/crates/rustc-dependencies/src/lib.rs
new file mode 100644
index 000000000..13fcbc491
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rustc-dependencies/src/lib.rs
@@ -0,0 +1,48 @@
+//! A wrapper around rustc internal crates, which enables switching between compiler provided
+//! ones and stable ones published in crates.io
+
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_lexer;
+
+pub mod lexer {
+ #[cfg(not(feature = "in-rust-tree"))]
+ pub use ::ra_ap_rustc_lexer::*;
+
+ #[cfg(feature = "in-rust-tree")]
+ pub use ::rustc_lexer::*;
+}
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_parse_format;
+
+pub mod parse_format {
+ #[cfg(not(feature = "in-rust-tree"))]
+ pub use ::ra_ap_rustc_parse_format::*;
+
+ #[cfg(feature = "in-rust-tree")]
+ pub use ::rustc_parse_format::*;
+}
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_abi;
+
+pub mod abi {
+ #[cfg(not(feature = "in-rust-tree"))]
+ pub use ::ra_ap_rustc_abi::*;
+
+ #[cfg(feature = "in-rust-tree")]
+ pub use ::rustc_abi::*;
+}
+
+#[cfg(feature = "in-rust-tree")]
+extern crate rustc_index;
+
+pub mod index {
+ #[cfg(not(feature = "in-rust-tree"))]
+ pub use ::ra_ap_rustc_index::*;
+
+ #[cfg(feature = "in-rust-tree")]
+ pub use ::rustc_index::*;
+}
diff --git a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
index fb2b9ebef..0514af8e7 100644
--- a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
@@ -12,4 +12,4 @@ rust-version.workspace = true
doctest = false
[dependencies]
-xshell = "0.2.2"
+xshell.workspace = true
diff --git a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
index 1514c6c7d..18fa77fd9 100644
--- a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
@@ -6,7 +6,7 @@
//!
//! This crate contains utilities to make this kind of source-gen easy.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{
fmt, fs, mem,
diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
index 536f000a4..c914ae214 100644
--- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
@@ -12,15 +12,16 @@ rust-version.workspace = true
doctest = false
[dependencies]
-libc = "0.2.135"
backtrace = { version = "0.3.67", optional = true }
always-assert = { version = "0.1.2", features = ["log"] }
jod-thread = "0.1.2"
+libc.workspace = true
crossbeam-channel = "0.5.5"
+itertools.workspace = true
# Think twice before adding anything here
[target.'cfg(windows)'.dependencies]
-miow = "0.5.0"
+miow = "0.6.0"
winapi = { version = "0.3.9", features = ["winerror"] }
[features]
diff --git a/src/tools/rust-analyzer/crates/stdx/src/anymap.rs b/src/tools/rust-analyzer/crates/stdx/src/anymap.rs
new file mode 100644
index 000000000..9990f8b08
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/stdx/src/anymap.rs
@@ -0,0 +1,379 @@
+//! This file is a port of only the necessary features from https://github.com/chris-morgan/anymap version 1.0.0-beta.2 for use within rust-analyzer.
+//! Copyright © 2014–2022 Chris Morgan.
+//! COPYING: https://github.com/chris-morgan/anymap/blob/master/COPYING
+//! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0
+//!
+//! This implementation provides a safe and convenient store for one value of each type.
+//!
+//! Your starting point is [`Map`]. It has an example.
+//!
+//! # Cargo features
+//!
+//! This implementation has two independent features, each of which provides an implementation providing
+//! types `Map`, `AnyMap`, `OccupiedEntry`, `VacantEntry`, `Entry` and `RawMap`:
+//!
+//! - **std** (default, *enabled* in this build):
+//! an implementation using `std::collections::hash_map`, placed in the crate root
+//! (e.g. `anymap::AnyMap`).
+
+#![warn(missing_docs, unused_results)]
+
+use core::hash::Hasher;
+
+/// A hasher designed to eke a little more speed out, given `TypeId`’s known characteristics.
+///
+/// Specifically, this is a no-op hasher that expects to be fed a u64’s worth of
+/// randomly-distributed bits. It works well for `TypeId` (eliminating start-up time, so that my
+/// get_missing benchmark is ~30ns rather than ~900ns, and being a good deal faster after that, so
+/// that my insert_and_get_on_260_types benchmark is ~12μs instead of ~21.5μs), but will
+/// panic in debug mode and always emit zeros in release mode for any other sorts of inputs, so
+/// yeah, don’t use it! 😀
+#[derive(Default)]
+pub struct TypeIdHasher {
+ value: u64,
+}
+
+impl Hasher for TypeIdHasher {
+ #[inline]
+ fn write(&mut self, bytes: &[u8]) {
+ // This expects to receive exactly one 64-bit value, and there’s no realistic chance of
+ // that changing, but I don’t want to depend on something that isn’t expressly part of the
+ // contract for safety. But I’m OK with release builds putting everything in one bucket
+ // if it *did* change (and debug builds panicking).
+ debug_assert_eq!(bytes.len(), 8);
+ let _ = bytes.try_into().map(|array| self.value = u64::from_ne_bytes(array));
+ }
+
+ #[inline]
+ fn finish(&self) -> u64 {
+ self.value
+ }
+}
+
+use core::any::{Any, TypeId};
+use core::hash::BuildHasherDefault;
+use core::marker::PhantomData;
+
+use ::std::collections::hash_map::{self, HashMap};
+
+/// Raw access to the underlying `HashMap`.
+///
+/// This alias is provided for convenience because of the ugly third generic parameter.
+pub type RawMap<A> = HashMap<TypeId, Box<A>, BuildHasherDefault<TypeIdHasher>>;
+
+/// A collection containing zero or one values for any given type and allowing convenient,
+/// type-safe access to those values.
+///
+/// The type parameter `A` allows you to use a different value type; normally you will want
+/// it to be `core::any::Any` (also known as `std::any::Any`), but there are other choices:
+///
+/// - If you want the entire map to be cloneable, use `CloneAny` instead of `Any`; with
+/// that, you can only add types that implement `Clone` to the map.
+/// - You can add on `+ Send` or `+ Send + Sync` (e.g. `Map<dyn Any + Send>`) to add those
+/// auto traits.
+///
+/// Cumulatively, there are thus six forms of map:
+///
+/// - <code>[Map]&lt;dyn [core::any::Any]&gt;</code>,
+/// also spelled [`AnyMap`] for convenience.
+/// - <code>[Map]&lt;dyn [core::any::Any] + Send&gt;</code>
+/// - <code>[Map]&lt;dyn [core::any::Any] + Send + Sync&gt;</code>
+/// - <code>[Map]&lt;dyn [CloneAny]&gt;</code>
+/// - <code>[Map]&lt;dyn [CloneAny] + Send&gt;</code>
+/// - <code>[Map]&lt;dyn [CloneAny] + Send + Sync&gt;</code>
+///
+/// ## Example
+///
+/// (Here using the [`AnyMap`] convenience alias; the first line could use
+/// <code>[anymap::Map][Map]::&lt;[core::any::Any]&gt;::new()</code> instead if desired.)
+///
+/// ```rust
+#[doc = "let mut data = anymap::AnyMap::new();"]
+/// assert_eq!(data.get(), None::<&i32>);
+/// ```
+///
+/// Values containing non-static references are not permitted.
+#[derive(Debug)]
+pub struct Map<A: ?Sized + Downcast = dyn Any> {
+ raw: RawMap<A>,
+}
+
+/// The most common type of `Map`: just using `Any`; <code>[Map]&lt;dyn [Any]&gt;</code>.
+///
+/// Why is this a separate type alias rather than a default value for `Map<A>`?
+/// `Map::new()` doesn’t seem to be happy to infer that it should go with the default
+/// value. It’s a bit sad, really. Ah well, I guess this approach will do.
+pub type AnyMap = Map<dyn Any>;
+impl<A: ?Sized + Downcast> Default for Map<A> {
+ #[inline]
+ fn default() -> Map<A> {
+ Map::new()
+ }
+}
+
+impl<A: ?Sized + Downcast> Map<A> {
+ /// Create an empty collection.
+ #[inline]
+ pub fn new() -> Map<A> {
+ Map { raw: RawMap::with_hasher(Default::default()) }
+ }
+
+ /// Returns a reference to the value stored in the collection for the type `T`,
+ /// if it exists.
+ #[inline]
+ pub fn get<T: IntoBox<A>>(&self) -> Option<&T> {
+ self.raw.get(&TypeId::of::<T>()).map(|any| unsafe { any.downcast_ref_unchecked::<T>() })
+ }
+
+ /// Gets the entry for the given type in the collection for in-place manipulation
+ #[inline]
+ pub fn entry<T: IntoBox<A>>(&mut self) -> Entry<'_, A, T> {
+ match self.raw.entry(TypeId::of::<T>()) {
+ hash_map::Entry::Occupied(e) => {
+ Entry::Occupied(OccupiedEntry { inner: e, type_: PhantomData })
+ }
+ hash_map::Entry::Vacant(e) => {
+ Entry::Vacant(VacantEntry { inner: e, type_: PhantomData })
+ }
+ }
+ }
+}
+
+/// A view into a single occupied location in an `Map`.
+pub struct OccupiedEntry<'a, A: ?Sized + Downcast, V: 'a> {
+ inner: hash_map::OccupiedEntry<'a, TypeId, Box<A>>,
+ type_: PhantomData<V>,
+}
+
+/// A view into a single empty location in an `Map`.
+pub struct VacantEntry<'a, A: ?Sized + Downcast, V: 'a> {
+ inner: hash_map::VacantEntry<'a, TypeId, Box<A>>,
+ type_: PhantomData<V>,
+}
+
+/// A view into a single location in an `Map`, which may be vacant or occupied.
+pub enum Entry<'a, A: ?Sized + Downcast, V> {
+ /// An occupied Entry
+ Occupied(OccupiedEntry<'a, A, V>),
+ /// A vacant Entry
+ Vacant(VacantEntry<'a, A, V>),
+}
+
+impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> Entry<'a, A, V> {
+ /// Ensures a value is in the entry by inserting the result of the default function if
+ /// empty, and returns a mutable reference to the value in the entry.
+ #[inline]
+ pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+ match self {
+ Entry::Occupied(inner) => inner.into_mut(),
+ Entry::Vacant(inner) => inner.insert(default()),
+ }
+ }
+}
+
+impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> OccupiedEntry<'a, A, V> {
+ /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+ /// with a lifetime bound to the collection itself
+ #[inline]
+ pub fn into_mut(self) -> &'a mut V {
+ unsafe { self.inner.into_mut().downcast_mut_unchecked() }
+ }
+}
+
+impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> VacantEntry<'a, A, V> {
+ /// Sets the value of the entry with the VacantEntry's key,
+ /// and returns a mutable reference to it
+ #[inline]
+ pub fn insert(self, value: V) -> &'a mut V {
+ unsafe { self.inner.insert(value.into_box()).downcast_mut_unchecked() }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[derive(Clone, Debug, PartialEq)]
+ struct A(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct B(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct C(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct D(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct E(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct F(i32);
+ #[derive(Clone, Debug, PartialEq)]
+ struct J(i32);
+
+ #[test]
+ fn test_varieties() {
+ fn assert_send<T: Send>() {}
+ fn assert_sync<T: Sync>() {}
+ fn assert_debug<T: ::core::fmt::Debug>() {}
+ assert_send::<Map<dyn Any + Send>>();
+ assert_send::<Map<dyn Any + Send + Sync>>();
+ assert_sync::<Map<dyn Any + Send + Sync>>();
+ assert_debug::<Map<dyn Any>>();
+ assert_debug::<Map<dyn Any + Send>>();
+ assert_debug::<Map<dyn Any + Send + Sync>>();
+ assert_send::<Map<dyn CloneAny + Send>>();
+ assert_send::<Map<dyn CloneAny + Send + Sync>>();
+ assert_sync::<Map<dyn CloneAny + Send + Sync>>();
+ assert_debug::<Map<dyn CloneAny>>();
+ assert_debug::<Map<dyn CloneAny + Send>>();
+ assert_debug::<Map<dyn CloneAny + Send + Sync>>();
+ }
+
+ #[test]
+ fn type_id_hasher() {
+ use core::any::TypeId;
+ use core::hash::Hash;
+ fn verify_hashing_with(type_id: TypeId) {
+ let mut hasher = TypeIdHasher::default();
+ type_id.hash(&mut hasher);
+ // SAFETY: u64 is valid for all bit patterns.
+ let _ = hasher.finish();
+ }
+ // Pick a variety of types, just to demonstrate it’s all sane. Normal, zero-sized, unsized, &c.
+ verify_hashing_with(TypeId::of::<usize>());
+ verify_hashing_with(TypeId::of::<()>());
+ verify_hashing_with(TypeId::of::<str>());
+ verify_hashing_with(TypeId::of::<&str>());
+ verify_hashing_with(TypeId::of::<Vec<u8>>());
+ }
+}
+
+// impl some traits for dyn Any
+use core::fmt;
+
+#[doc(hidden)]
+pub trait CloneToAny {
+ /// Clone `self` into a new `Box<dyn CloneAny>` object.
+ fn clone_to_any(&self) -> Box<dyn CloneAny>;
+}
+
+impl<T: Any + Clone> CloneToAny for T {
+ #[inline]
+ fn clone_to_any(&self) -> Box<dyn CloneAny> {
+ Box::new(self.clone())
+ }
+}
+
+macro_rules! impl_clone {
+ ($t:ty) => {
+ impl Clone for Box<$t> {
+ #[inline]
+ fn clone(&self) -> Box<$t> {
+ // SAFETY: this dance is to reapply any Send/Sync marker. I’m not happy about this
+ // approach, given that I used to do it in safe code, but then came a dodgy
+ // future-compatibility warning where_clauses_object_safety, which is spurious for
+ // auto traits but still super annoying (future-compatibility lints seem to mean
+ // your bin crate needs a corresponding allow!). Although I explained my plight¹
+ // and it was all explained and agreed upon, no action has been taken. So I finally
+ // caved and worked around it by doing it this way, which matches what’s done for
+ // core::any², so it’s probably not *too* bad.
+ //
+ // ¹ https://github.com/rust-lang/rust/issues/51443#issuecomment-421988013
+ // ² https://github.com/rust-lang/rust/blob/e7825f2b690c9a0d21b6f6d84c404bb53b151b38/library/alloc/src/boxed.rs#L1613-L1616
+ let clone: Box<dyn CloneAny> = (**self).clone_to_any();
+ let raw: *mut dyn CloneAny = Box::into_raw(clone);
+ unsafe { Box::from_raw(raw as *mut $t) }
+ }
+ }
+
+ impl fmt::Debug for $t {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad(stringify!($t))
+ }
+ }
+ };
+}
+
+/// Methods for downcasting from an `Any`-like trait object.
+///
+/// This should only be implemented on trait objects for subtraits of `Any`, though you can
+/// implement it for other types and it’ll work fine, so long as your implementation is correct.
+pub trait Downcast {
+ /// Gets the `TypeId` of `self`.
+ fn type_id(&self) -> TypeId;
+
+ // Note the bound through these downcast methods is 'static, rather than the inexpressible
+ // concept of Self-but-as-a-trait (where Self is `dyn Trait`). This is sufficient, exceeding
+ // TypeId’s requirements. Sure, you *can* do CloneAny.downcast_unchecked::<NotClone>() and the
+ // type system won’t protect you, but that doesn’t introduce any unsafety: the method is
+ // already unsafe because you can specify the wrong type, and if this were exposing safe
+ // downcasting, CloneAny.downcast::<NotClone>() would just return an error, which is just as
+ // correct.
+ //
+ // Now in theory we could also add T: ?Sized, but that doesn’t play nicely with the common
+ // implementation, so I’m doing without it.
+
+ /// Downcast from `&Any` to `&T`, without checking the type matches.
+ ///
+ /// # Safety
+ ///
+ /// The caller must ensure that `T` matches the trait object, on pain of *undefined behaviour*.
+ unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T;
+
+ /// Downcast from `&mut Any` to `&mut T`, without checking the type matches.
+ ///
+ /// # Safety
+ ///
+ /// The caller must ensure that `T` matches the trait object, on pain of *undefined behaviour*.
+ unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T;
+}
+
+/// A trait for the conversion of an object into a boxed trait object.
+pub trait IntoBox<A: ?Sized + Downcast>: Any {
+ /// Convert self into the appropriate boxed form.
+ fn into_box(self) -> Box<A>;
+}
+
+macro_rules! implement {
+ ($any_trait:ident $(+ $auto_traits:ident)*) => {
+ impl Downcast for dyn $any_trait $(+ $auto_traits)* {
+ #[inline]
+ fn type_id(&self) -> TypeId {
+ self.type_id()
+ }
+
+ #[inline]
+ unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T {
+ &*(self as *const Self as *const T)
+ }
+
+ #[inline]
+ unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T {
+ &mut *(self as *mut Self as *mut T)
+ }
+ }
+
+ impl<T: $any_trait $(+ $auto_traits)*> IntoBox<dyn $any_trait $(+ $auto_traits)*> for T {
+ #[inline]
+ fn into_box(self) -> Box<dyn $any_trait $(+ $auto_traits)*> {
+ Box::new(self)
+ }
+ }
+ }
+}
+
+implement!(Any);
+implement!(Any + Send);
+implement!(Any + Send + Sync);
+
+/// [`Any`], but with cloning.
+///
+/// Every type with no non-`'static` references that implements `Clone` implements `CloneAny`.
+/// See [`core::any`] for more details on `Any` in general.
+pub trait CloneAny: Any + CloneToAny {}
+impl<T: Any + Clone> CloneAny for T {}
+implement!(CloneAny);
+implement!(CloneAny + Send);
+implement!(CloneAny + Send + Sync);
+impl_clone!(dyn CloneAny);
+impl_clone!(dyn CloneAny + Send);
+impl_clone!(dyn CloneAny + Send + Sync);
diff --git a/src/tools/rust-analyzer/crates/stdx/src/lib.rs b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
index 24990d6a0..71e269f74 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
@@ -1,6 +1,6 @@
//! Missing batteries for standard libraries.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::io as sio;
use std::process::Command;
@@ -12,8 +12,10 @@ pub mod panic_context;
pub mod non_empty_vec;
pub mod rand;
pub mod thread;
+pub mod anymap;
pub use always_assert::{always, never};
+pub use itertools;
#[inline(always)]
pub fn is_ci() -> bool {
@@ -39,6 +41,24 @@ Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`.
);
}
+pub trait TupleExt {
+ type Head;
+ type Tail;
+ fn head(self) -> Self::Head;
+ fn tail(self) -> Self::Tail;
+}
+
+impl<T, U> TupleExt for (T, U) {
+ type Head = T;
+ type Tail = U;
+ fn head(self) -> Self::Head {
+ self.0
+ }
+ fn tail(self) -> Self::Tail {
+ self.1
+ }
+}
+
pub fn to_lower_snake_case(s: &str) -> String {
to_snake_case(s, char::to_lowercase)
}
@@ -89,6 +109,57 @@ where
words.join("_")
}
+// Taken from rustc.
+pub fn to_camel_case(ident: &str) -> String {
+ ident
+ .trim_matches('_')
+ .split('_')
+ .filter(|component| !component.is_empty())
+ .map(|component| {
+ let mut camel_cased_component = String::with_capacity(component.len());
+
+ let mut new_word = true;
+ let mut prev_is_lower_case = true;
+
+ for c in component.chars() {
+ // Preserve the case if an uppercase letter follows a lowercase letter, so that
+ // `camelCase` is converted to `CamelCase`.
+ if prev_is_lower_case && c.is_uppercase() {
+ new_word = true;
+ }
+
+ if new_word {
+ camel_cased_component.extend(c.to_uppercase());
+ } else {
+ camel_cased_component.extend(c.to_lowercase());
+ }
+
+ prev_is_lower_case = c.is_lowercase();
+ new_word = false;
+ }
+
+ camel_cased_component
+ })
+ .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
+ // separate two components with an underscore if their boundary cannot
+ // be distinguished using an uppercase/lowercase case distinction
+ let join = prev
+ .and_then(|prev| {
+ let f = next.chars().next()?;
+ let l = prev.chars().last()?;
+ Some(!char_has_case(l) && !char_has_case(f))
+ })
+ .unwrap_or(false);
+ (acc + if join { "_" } else { "" } + &next, Some(next))
+ })
+ .0
+}
+
+// Taken from rustc.
+pub fn char_has_case(c: char) -> bool {
+ c.is_lowercase() || c.is_uppercase()
+}
+
pub fn replace(buf: &mut String, from: char, to: &str) {
if !buf.contains(from) {
return;
diff --git a/src/tools/rust-analyzer/crates/stdx/src/macros.rs b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
index 1a9982fa8..d71e418c8 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/macros.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
@@ -15,7 +15,12 @@ macro_rules! eprintln {
macro_rules! format_to {
($buf:expr) => ();
($buf:expr, $lit:literal $($arg:tt)*) => {
- { use ::std::fmt::Write as _; let _ = ::std::write!($buf, $lit $($arg)*); }
+ {
+ use ::std::fmt::Write as _;
+ // We can't do ::std::fmt::Write::write_fmt($buf, format_args!($lit $($arg)*))
+ // unfortunately, as that loses out on autoref behavior.
+ _ = $buf.write_fmt(format_args!($lit $($arg)*))
+ }
};
}
diff --git a/src/tools/rust-analyzer/crates/stdx/src/process.rs b/src/tools/rust-analyzer/crates/stdx/src/process.rs
index e5aa34365..bca0cbc36 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/process.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/process.rs
@@ -23,7 +23,7 @@ pub fn streaming_output(
let idx = if eof {
data.len()
} else {
- match data.iter().rposition(|b| *b == b'\n') {
+ match data.iter().rposition(|&b| b == b'\n') {
Some(i) => i + 1,
None => return,
}
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
index 5ee0c4792..7a7c0d267 100644
--- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -14,16 +14,16 @@ doctest = false
[dependencies]
cov-mark = "2.0.0-pre.1"
-either = "1.7.0"
-itertools = "0.10.5"
-rowan = "0.15.11"
+either.workspace = true
+itertools.workspace = true
+rowan = "0.15.15"
rustc-hash = "1.1.0"
once_cell = "1.17.0"
-indexmap = "2.0.0"
+indexmap.workspace = true
smol_str.workspace = true
triomphe.workspace = true
-rustc_lexer.workspace = true
+rustc-dependencies.workspace = true
parser.workspace = true
profile.workspace = true
@@ -31,7 +31,7 @@ stdx.workspace = true
text-edit.workspace = true
[dev-dependencies]
-rayon = "1.6.1"
+rayon.workspace = true
expect-test = "1.4.0"
proc-macro2 = "1.0.47"
quote = "1.0.20"
@@ -41,4 +41,4 @@ test-utils.workspace = true
sourcegen.workspace = true
[features]
-in-rust-tree = []
+in-rust-tree = ["rustc-dependencies/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index 3603560d3..c3010d090 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -36,7 +36,7 @@ PathSegment =
'::'? NameRef
| NameRef GenericArgList?
| NameRef ParamList RetType?
-| '<' PathType ('as' PathType)? '>'
+| '<' Type ('as' PathType)? '>'
GenericArgList =
'::'? '<' (GenericArg (',' GenericArg)* ','?)? '>'
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
index a150d9e6c..37d821204 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -3,18 +3,17 @@
use std::iter::{empty, successors};
use parser::{SyntaxKind, T};
-use rowan::SyntaxElement;
use crate::{
algo::{self, neighbor},
ast::{self, edit::IndentLevel, make, HasGenericParams},
ted::{self, Position},
- AstNode, AstToken, Direction,
+ AstNode, AstToken, Direction, SyntaxElement,
SyntaxKind::{ATTR, COMMENT, WHITESPACE},
SyntaxNode, SyntaxToken,
};
-use super::HasName;
+use super::{HasArgList, HasName};
pub trait GenericParamsOwnerEdit: ast::HasGenericParams {
fn get_or_create_generic_param_list(&self) -> ast::GenericParamList;
@@ -224,7 +223,7 @@ pub trait AttrsOwnerEdit: ast::HasAttrs {
let after_attrs_and_comments = node
.children_with_tokens()
.find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
- .map_or(Position::first_child_of(node), |it| Position::before(it));
+ .map_or(Position::first_child_of(node), Position::before);
ted::insert_all(
after_attrs_and_comments,
@@ -362,6 +361,24 @@ impl ast::PathSegment {
}
}
+impl ast::MethodCallExpr {
+ pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
+ if self.generic_arg_list().is_none() {
+ let generic_arg_list = make::turbofish_generic_arg_list(empty()).clone_for_update();
+
+ if let Some(arg_list) = self.arg_list() {
+ ted::insert_raw(
+ ted::Position::before(arg_list.syntax()),
+ generic_arg_list.syntax(),
+ );
+ } else {
+ ted::append_child(self.syntax(), generic_arg_list.syntax());
+ }
+ }
+ self.generic_arg_list().unwrap()
+ }
+}
+
impl Removable for ast::UseTree {
fn remove(&self) {
for dir in [Direction::Next, Direction::Prev] {
@@ -433,7 +450,9 @@ impl ast::UseTree {
if &path == prefix && self.use_tree_list().is_none() {
if self.star_token().is_some() {
// path$0::* -> *
- self.coloncolon_token().map(ted::remove);
+ if let Some(a) = self.coloncolon_token() {
+ ted::remove(a)
+ }
ted::remove(prefix.syntax());
} else {
// path$0 -> self
@@ -460,7 +479,9 @@ impl ast::UseTree {
for p in successors(parent.parent_path(), |it| it.parent_path()) {
p.segment()?;
}
- prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove);
+ if let Some(a) = prefix.parent_path().and_then(|p| p.coloncolon_token()) {
+ ted::remove(a)
+ }
ted::remove(prefix.syntax());
Some(())
}
@@ -555,7 +576,7 @@ impl ast::AssocItemList {
None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
},
};
- let elements: Vec<SyntaxElement<_>> = vec![
+ let elements: Vec<SyntaxElement> = vec![
make::tokens::whitespace(&format!("{whitespace}{indent}")).into(),
item.syntax().clone().into(),
];
@@ -625,6 +646,50 @@ impl ast::MatchArmList {
}
}
+impl ast::LetStmt {
+ pub fn set_ty(&self, ty: Option<ast::Type>) {
+ match ty {
+ None => {
+ if let Some(colon_token) = self.colon_token() {
+ ted::remove(colon_token);
+ }
+
+ if let Some(existing_ty) = self.ty() {
+ if let Some(sibling) = existing_ty.syntax().prev_sibling_or_token() {
+ if sibling.kind() == SyntaxKind::WHITESPACE {
+ ted::remove(sibling);
+ }
+ }
+
+ ted::remove(existing_ty.syntax());
+ }
+
+ // Remove any trailing ws
+ if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE)
+ {
+ last.detach();
+ }
+ }
+ Some(new_ty) => {
+ if self.colon_token().is_none() {
+ ted::insert_raw(
+ Position::after(
+ self.pat().expect("let stmt should have a pattern").syntax(),
+ ),
+ make::token(T![:]),
+ );
+ }
+
+ if let Some(old_ty) = self.ty() {
+ ted::replace(old_ty.syntax(), new_ty.syntax());
+ } else {
+ ted::insert(Position::after(self.colon_token().unwrap()), new_ty.syntax());
+ }
+ }
+ }
+ }
+}
+
impl ast::RecordExprFieldList {
pub fn add_field(&self, field: ast::RecordExprField) {
let is_multiline = self.syntax().text().contains_char('\n');
@@ -749,7 +814,7 @@ impl ast::VariantList {
None => (IndentLevel::single(), Position::last_child_of(self.syntax())),
},
};
- let elements: Vec<SyntaxElement<_>> = vec![
+ let elements: Vec<SyntaxElement> = vec![
make::tokens::whitespace(&format!("{}{indent}", "\n")).into(),
variant.syntax().clone().into(),
ast::make::token(T![,]).into(),
@@ -784,6 +849,53 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
Some(())
}
+impl ast::IdentPat {
+ pub fn set_pat(&self, pat: Option<ast::Pat>) {
+ match pat {
+ None => {
+ if let Some(at_token) = self.at_token() {
+ // Remove `@ Pat`
+ let start = at_token.clone().into();
+ let end = self
+ .pat()
+ .map(|it| it.syntax().clone().into())
+ .unwrap_or_else(|| at_token.into());
+
+ ted::remove_all(start..=end);
+
+ // Remove any trailing ws
+ if let Some(last) =
+ self.syntax().last_token().filter(|it| it.kind() == WHITESPACE)
+ {
+ last.detach();
+ }
+ }
+ }
+ Some(pat) => {
+ if let Some(old_pat) = self.pat() {
+ // Replace existing pattern
+ ted::replace(old_pat.syntax(), pat.syntax())
+ } else if let Some(at_token) = self.at_token() {
+ // Have an `@` token but not a pattern yet
+ ted::insert(ted::Position::after(at_token), pat.syntax());
+ } else {
+ // Don't have an `@`, should have a name
+ let name = self.name().unwrap();
+
+ ted::insert_all(
+ ted::Position::after(name.syntax()),
+ vec![
+ make::token(T![@]).into(),
+ make::tokens::single_space().into(),
+ pat.syntax().clone().into(),
+ ],
+ )
+ }
+ }
+ }
+ }
+}
+
pub trait HasVisibilityEdit: ast::HasVisibility {
fn set_visibility(&self, visbility: ast::Visibility) {
match self.visibility() {
@@ -886,6 +998,65 @@ mod tests {
}
#[test]
+ fn test_ident_pat_set_pat() {
+ #[track_caller]
+ fn check(before: &str, expected: &str, pat: Option<ast::Pat>) {
+ let pat = pat.map(|it| it.clone_for_update());
+
+ let ident_pat = ast_mut_from_text::<ast::IdentPat>(&format!("fn f() {{ {before} }}"));
+ ident_pat.set_pat(pat);
+
+ let after = ast_mut_from_text::<ast::IdentPat>(&format!("fn f() {{ {expected} }}"));
+ assert_eq!(ident_pat.to_string(), after.to_string());
+ }
+
+ // replacing
+ check("let a @ _;", "let a @ ();", Some(make::tuple_pat([]).into()));
+
+ // note: no trailing semicolon is added for the below tests since it
+ // seems to be picked up by the ident pat during error recovery?
+
+ // adding
+ check("let a ", "let a @ ()", Some(make::tuple_pat([]).into()));
+ check("let a @ ", "let a @ ()", Some(make::tuple_pat([]).into()));
+
+ // removing
+ check("let a @ ()", "let a", None);
+ check("let a @ ", "let a", None);
+ }
+
+ #[test]
+ fn test_let_stmt_set_ty() {
+ #[track_caller]
+ fn check(before: &str, expected: &str, ty: Option<ast::Type>) {
+ let ty = ty.map(|it| it.clone_for_update());
+
+ let let_stmt = ast_mut_from_text::<ast::LetStmt>(&format!("fn f() {{ {before} }}"));
+ let_stmt.set_ty(ty);
+
+ let after = ast_mut_from_text::<ast::LetStmt>(&format!("fn f() {{ {expected} }}"));
+ assert_eq!(let_stmt.to_string(), after.to_string(), "{let_stmt:#?}\n!=\n{after:#?}");
+ }
+
+ // adding
+ check("let a;", "let a: ();", Some(make::ty_tuple([])));
+ // no semicolon due to it being eaten during error recovery
+ check("let a:", "let a: ()", Some(make::ty_tuple([])));
+
+ // replacing
+ check("let a: u8;", "let a: ();", Some(make::ty_tuple([])));
+ check("let a: u8 = 3;", "let a: () = 3;", Some(make::ty_tuple([])));
+ check("let a: = 3;", "let a: () = 3;", Some(make::ty_tuple([])));
+
+ // removing
+ check("let a: u8;", "let a;", None);
+ check("let a:;", "let a;", None);
+
+ check("let a: u8 = 3;", "let a = 3;", None);
+ check("let a: = 3;", "let a = 3;", None);
+ }
+
+ #[test]
fn add_variant_to_empty_enum() {
let variant = make::variant(make::name("Bar"), None).clone_for_update();
@@ -976,7 +1147,9 @@ enum Foo {
fn check_add_variant(before: &str, expected: &str, variant: ast::Variant) {
let enum_ = ast_mut_from_text::<ast::Enum>(before);
- enum_.variant_list().map(|it| it.add_variant(variant));
+ if let Some(it) = enum_.variant_list() {
+ it.add_variant(variant)
+ }
let after = enum_.to_string();
assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(after.trim()));
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index 7ba0d4dc6..6c86e5910 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -59,8 +59,9 @@ impl PathSegment {
pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
- pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
}
@@ -1577,14 +1578,6 @@ impl RecordPatField {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum GenericArg {
- TypeArg(TypeArg),
- AssocTypeArg(AssocTypeArg),
- LifetimeArg(LifetimeArg),
- ConstArg(ConstArg),
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Type {
ArrayType(ArrayType),
DynTraitType(DynTraitType),
@@ -1603,6 +1596,14 @@ pub enum Type {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+ TypeArg(TypeArg),
+ AssocTypeArg(AssocTypeArg),
+ LifetimeArg(LifetimeArg),
+ ConstArg(ConstArg),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Expr {
ArrayExpr(ArrayExpr),
AsmExpr(AsmExpr),
@@ -3319,41 +3320,6 @@ impl AstNode for RecordPatField {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
-impl From<TypeArg> for GenericArg {
- fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
-}
-impl From<AssocTypeArg> for GenericArg {
- fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
-}
-impl From<LifetimeArg> for GenericArg {
- fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
-}
-impl From<ConstArg> for GenericArg {
- fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
-}
-impl AstNode for GenericArg {
- fn can_cast(kind: SyntaxKind) -> bool {
- matches!(kind, TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG)
- }
- fn cast(syntax: SyntaxNode) -> Option<Self> {
- let res = match syntax.kind() {
- TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
- ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
- LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
- CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
- _ => return None,
- };
- Some(res)
- }
- fn syntax(&self) -> &SyntaxNode {
- match self {
- GenericArg::TypeArg(it) => &it.syntax,
- GenericArg::AssocTypeArg(it) => &it.syntax,
- GenericArg::LifetimeArg(it) => &it.syntax,
- GenericArg::ConstArg(it) => &it.syntax,
- }
- }
-}
impl From<ArrayType> for Type {
fn from(node: ArrayType) -> Type { Type::ArrayType(node) }
}
@@ -3455,6 +3421,41 @@ impl AstNode for Type {
}
}
}
+impl From<TypeArg> for GenericArg {
+ fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
+}
+impl From<AssocTypeArg> for GenericArg {
+ fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
+}
+impl From<LifetimeArg> for GenericArg {
+ fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
+}
+impl From<ConstArg> for GenericArg {
+ fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
+}
+impl AstNode for GenericArg {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
+ ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
+ LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
+ CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericArg::TypeArg(it) => &it.syntax,
+ GenericArg::AssocTypeArg(it) => &it.syntax,
+ GenericArg::LifetimeArg(it) => &it.syntax,
+ GenericArg::ConstArg(it) => &it.syntax,
+ }
+ }
+}
impl From<ArrayExpr> for Expr {
fn from(node: ArrayExpr) -> Expr { Expr::ArrayExpr(node) }
}
@@ -4340,12 +4341,12 @@ impl AstNode for AnyHasVisibility {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
-impl std::fmt::Display for GenericArg {
+impl std::fmt::Display for Type {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
-impl std::fmt::Display for Type {
+impl std::fmt::Display for GenericArg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
index 17e311c0c..ad63cc558 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -433,7 +433,6 @@ pub fn record_field(
ast_from_text(&format!("struct S {{ {visibility}{name}: {ty}, }}"))
}
-// TODO
pub fn block_expr(
stmts: impl IntoIterator<Item = ast::Stmt>,
tail_expr: Option<ast::Expr>,
@@ -853,6 +852,10 @@ pub fn self_param() -> ast::SelfParam {
ast_from_text("fn f(&self) { }")
}
+pub fn mut_self_param() -> ast::SelfParam {
+ ast_from_text("fn f(&mut self) { }")
+}
+
pub fn ret_type(ty: ast::Type) -> ast::RetType {
ast_from_text(&format!("fn f() -> {ty} {{ }}"))
}
@@ -938,6 +941,13 @@ pub fn lifetime_arg(lifetime: ast::Lifetime) -> ast::LifetimeArg {
ast_from_text(&format!("const S: T<{lifetime}> = ();"))
}
+pub fn turbofish_generic_arg_list(
+ args: impl IntoIterator<Item = ast::GenericArg>,
+) -> ast::GenericArgList {
+ let args = args.into_iter().join(", ");
+ ast_from_text(&format!("const S: T::<{args}> = ();"))
+}
+
pub(crate) fn generic_arg_list(
args: impl IntoIterator<Item = ast::GenericArg>,
) -> ast::GenericArgList {
@@ -973,6 +983,11 @@ pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::T
ast_from_text(&format!("struct f({visibility}{ty});"))
}
+pub fn variant_list(variants: impl IntoIterator<Item = ast::Variant>) -> ast::VariantList {
+ let variants = variants.into_iter().join(", ");
+ ast_from_text(&format!("enum f {{ {variants} }}"))
+}
+
pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
let field_list = match field_list {
None => String::new(),
@@ -1037,6 +1052,19 @@ pub fn struct_(
ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",))
}
+pub fn enum_(
+ visibility: Option<ast::Visibility>,
+ enum_name: ast::Name,
+ variant_list: ast::VariantList,
+) -> ast::Enum {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{it} "),
+ };
+
+ ast_from_text(&format!("{visibility}enum {enum_name} {variant_list}"))
+}
+
pub fn attr_outer(meta: ast::Meta) -> ast::Attr {
ast_from_text(&format!("#[{meta}]"))
}
@@ -1105,7 +1133,7 @@ pub mod tokens {
pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
SourceFile::parse(
- "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p)\n;\n\n",
+ "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\n",
)
});
@@ -1149,6 +1177,16 @@ pub mod tokens {
lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
}
+ pub fn ident(text: &str) -> SyntaxToken {
+ assert_eq!(text.trim(), text);
+ let path: ast::Path = super::ext::ident_path(text);
+ path.syntax()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == IDENT)
+ .unwrap()
+ }
+
pub fn single_newline() -> SyntaxToken {
let res = SOURCE_FILE
.tree()
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
index 691d0c618..f81dff884 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs
@@ -275,10 +275,19 @@ impl ast::Path {
successors(Some(self.clone()), ast::Path::qualifier).last().unwrap()
}
+ pub fn first_qualifier(&self) -> Option<ast::Path> {
+ successors(self.qualifier(), ast::Path::qualifier).last()
+ }
+
pub fn first_segment(&self) -> Option<ast::PathSegment> {
self.first_qualifier_or_self().segment()
}
+    // FIXME: Check usages of Self::segments, they might be wrong because of the logic of the below function
+ pub fn segments_of_this_path_only_rev(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
+ self.qualifiers_and_self().filter_map(|it| it.segment())
+ }
+
pub fn segments(&self) -> impl Iterator<Item = ast::PathSegment> + Clone {
successors(self.first_segment(), |p| {
p.parent_path().parent_path().and_then(|p| p.segment())
@@ -289,6 +298,10 @@ impl ast::Path {
successors(self.qualifier(), |p| p.qualifier())
}
+ pub fn qualifiers_and_self(&self) -> impl Iterator<Item = ast::Path> + Clone {
+ successors(Some(self.clone()), |p| p.qualifier())
+ }
+
pub fn top_path(&self) -> ast::Path {
let mut this = self.clone();
while let Some(path) = this.parent_path() {
@@ -361,6 +374,15 @@ impl ast::Impl {
}
}
+// [#15778](https://github.com/rust-lang/rust-analyzer/issues/15778)
+impl ast::PathSegment {
+ pub fn qualifying_trait(&self) -> Option<ast::PathType> {
+ let mut path_types = support::children(self.syntax());
+ let first = path_types.next()?;
+ path_types.next().or(Some(first))
+ }
+}
+
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StructKind {
Record(ast::RecordFieldList),
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
index 87fd51d70..d5d565a01 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -2,6 +2,8 @@
use std::borrow::Cow;
+use rustc_dependencies::lexer as rustc_lexer;
+
use rustc_lexer::unescape::{
unescape_byte, unescape_c_string, unescape_char, unescape_literal, CStrUnit, Mode,
};
@@ -119,6 +121,7 @@ impl ast::Whitespace {
}
}
+#[derive(Debug)]
pub struct QuoteOffsets {
pub quotes: (TextRange, TextRange),
pub contents: TextRange,
@@ -165,6 +168,11 @@ pub trait IsString: AstToken {
fn text_range_between_quotes(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.contents)
}
+ fn text_without_quotes(&self) -> &str {
+ let text = self.text();
+ let Some(offsets) = self.text_range_between_quotes() else { return text };
+ &text[offsets - self.syntax().text_range().start()]
+ }
fn open_quote_text_range(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.quotes.0)
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
index 3e43df2d0..16f7356b1 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/traits.rs
@@ -76,9 +76,6 @@ pub trait HasDocComments: HasAttrs {
fn doc_comments(&self) -> DocCommentIter {
DocCommentIter { iter: self.syntax().children_with_tokens() }
}
- fn doc_comments_and_attrs(&self) -> AttrDocCommentIter {
- AttrDocCommentIter { iter: self.syntax().children_with_tokens() }
- }
}
impl DocCommentIter {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
index 27c8a13e5..d60069804 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -19,7 +19,8 @@
//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+#![warn(rust_2018_idioms, unused_lifetimes)]
#[allow(unused)]
macro_rules! eprintln {
@@ -74,7 +75,7 @@ pub use smol_str::SmolStr;
#[derive(Debug, PartialEq, Eq)]
pub struct Parse<T> {
green: GreenNode,
- errors: Arc<Vec<SyntaxError>>,
+ errors: Option<Arc<[SyntaxError]>>,
_ty: PhantomData<fn() -> T>,
}
@@ -86,14 +87,18 @@ impl<T> Clone for Parse<T> {
impl<T> Parse<T> {
fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
- Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ Parse {
+ green,
+ errors: if errors.is_empty() { None } else { Some(errors.into()) },
+ _ty: PhantomData,
+ }
}
pub fn syntax_node(&self) -> SyntaxNode {
SyntaxNode::new_root(self.green.clone())
}
pub fn errors(&self) -> &[SyntaxError] {
- &self.errors
+ self.errors.as_deref().unwrap_or_default()
}
}
@@ -106,11 +111,10 @@ impl<T: AstNode> Parse<T> {
T::cast(self.syntax_node()).unwrap()
}
- pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
- if self.errors.is_empty() {
- Ok(self.tree())
- } else {
- Err(self.errors)
+ pub fn ok(self) -> Result<T, Arc<[SyntaxError]>> {
+ match self.errors {
+ Some(e) => Err(e),
+ None => Ok(self.tree()),
}
}
}
@@ -128,7 +132,7 @@ impl Parse<SyntaxNode> {
impl Parse<SourceFile> {
pub fn debug_dump(&self) -> String {
let mut buf = format!("{:#?}", self.tree().syntax());
- for err in self.errors.iter() {
+ for err in self.errors.as_deref().into_iter().flat_map(<[_]>::iter) {
format_to!(buf, "error {:?}: {}\n", err.range(), err);
}
buf
@@ -140,13 +144,16 @@ impl Parse<SourceFile> {
fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
// FIXME: validation errors are not handled here
- parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
- |(green_node, errors, _reparsed_range)| Parse {
- green: green_node,
- errors: Arc::new(errors),
- _ty: PhantomData,
- },
+ parsing::incremental_reparse(
+ self.tree().syntax(),
+ indel,
+ self.errors.as_deref().unwrap_or_default().iter().cloned(),
)
+ .map(|(green_node, errors, _reparsed_range)| Parse {
+ green: green_node,
+ errors: if errors.is_empty() { None } else { Some(errors.into()) },
+ _ty: PhantomData,
+ })
}
fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
@@ -167,7 +174,11 @@ impl SourceFile {
errors.extend(validation::validate(&root));
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
- Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ Parse {
+ green,
+ errors: if errors.is_empty() { None } else { Some(errors.into()) },
+ _ty: PhantomData,
+ }
}
}
@@ -181,29 +192,27 @@ impl ast::TokenTree {
let kind = t.kind();
if kind.is_trivia() {
was_joint = false
+ } else if kind == SyntaxKind::IDENT {
+ let token_text = t.text();
+ let contextual_kw =
+ SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+ parser_input.push_ident(contextual_kw);
} else {
- if kind == SyntaxKind::IDENT {
- let token_text = t.text();
- let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
- .unwrap_or(SyntaxKind::IDENT);
- parser_input.push_ident(contextual_kw);
- } else {
- if was_joint {
+ if was_joint {
+ parser_input.was_joint();
+ }
+ parser_input.push(kind);
+ // Tag the token as joint if it is float with a fractional part
+ // we use this jointness to inform the parser about what token split
+ // event to emit when we encounter a float literal in a field access
+ if kind == SyntaxKind::FLOAT_NUMBER {
+ if !t.text().ends_with('.') {
parser_input.was_joint();
- }
- parser_input.push(kind);
- // Tag the token as joint if it is float with a fractional part
- // we use this jointness to inform the parser about what token split
- // event to emit when we encounter a float literal in a field access
- if kind == SyntaxKind::FLOAT_NUMBER {
- if !t.text().ends_with('.') {
- parser_input.was_joint();
- } else {
- was_joint = false;
- }
} else {
- was_joint = true;
+ was_joint = false;
}
+ } else {
+ was_joint = true;
}
}
}
@@ -276,7 +285,11 @@ impl ast::TokenTree {
let (green, errors) = builder.finish_raw();
- Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ Parse {
+ green,
+ errors: if errors.is_empty() { None } else { Some(errors.into()) },
+ _ty: PhantomData,
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
index 45e591609..0ddc64171 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
@@ -20,7 +20,7 @@ use crate::{
pub(crate) fn incremental_reparse(
node: &SyntaxNode,
edit: &Indel,
- errors: Vec<SyntaxError>,
+ errors: impl IntoIterator<Item = SyntaxError>,
) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
@@ -147,7 +147,7 @@ fn is_balanced(lexed: &parser::LexedStr<'_>) -> bool {
}
fn merge_errors(
- old_errors: Vec<SyntaxError>,
+ old_errors: impl IntoIterator<Item = SyntaxError>,
new_errors: Vec<SyntaxError>,
range_before_reparse: TextRange,
edit: &Indel,
@@ -191,8 +191,12 @@ mod tests {
let fully_reparsed = SourceFile::parse(&after);
let incrementally_reparsed: Parse<SourceFile> = {
let before = SourceFile::parse(&before);
- let (green, new_errors, range) =
- incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
+ let (green, new_errors, range) = incremental_reparse(
+ before.tree().syntax(),
+ &edit,
+ before.errors.as_deref().unwrap_or_default().iter().cloned(),
+ )
+ .unwrap();
assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
Parse::new(green, new_errors)
};
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
index 1d4a89201..8750147ee 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ptr.rs
@@ -22,12 +22,18 @@ use crate::{syntax_node::RustLanguage, AstNode, SyntaxNode};
pub type SyntaxNodePtr = rowan::ast::SyntaxNodePtr<RustLanguage>;
/// Like `SyntaxNodePtr`, but remembers the type of node.
-#[derive(Debug)]
pub struct AstPtr<N: AstNode> {
raw: SyntaxNodePtr,
_ty: PhantomData<fn() -> N>,
}
+impl<N: AstNode + std::fmt::Debug> std::fmt::Debug for AstPtr<N> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_tuple("AstPtr").field(&self.raw).finish()
+ }
+}
+
+impl<N: AstNode> Copy for AstPtr<N> {}
impl<N: AstNode> Clone for AstPtr<N> {
fn clone(&self) -> AstPtr<N> {
AstPtr { raw: self.raw.clone(), _ty: PhantomData }
@@ -73,6 +79,10 @@ impl<N: AstNode> AstPtr<N> {
Some(AstPtr { raw: self.raw, _ty: PhantomData })
}
+ pub fn kind(&self) -> parser::SyntaxKind {
+ self.raw.kind()
+ }
+
pub fn upcast<M: AstNode>(self) -> AstPtr<M>
where
N: Into<M>,
@@ -84,6 +94,20 @@ impl<N: AstNode> AstPtr<N> {
pub fn try_from_raw(raw: SyntaxNodePtr) -> Option<AstPtr<N>> {
N::can_cast(raw.kind()).then_some(AstPtr { raw, _ty: PhantomData })
}
+
+ pub fn wrap_left<R>(self) -> AstPtr<either::Either<N, R>>
+ where
+ either::Either<N, R>: AstNode,
+ {
+ AstPtr { raw: self.raw, _ty: PhantomData }
+ }
+
+ pub fn wrap_right<L>(self) -> AstPtr<either::Either<L, N>>
+ where
+ either::Either<L, N>: AstNode,
+ {
+ AstPtr { raw: self.raw, _ty: PhantomData }
+ }
}
impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
index 168439053..8ae1242cf 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs
@@ -17,11 +17,11 @@ use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
#[test]
fn parse_smoke_test() {
- let code = r##"
+ let code = r#"
fn main() {
println!("Hello, world!")
}
- "##;
+ "#;
let parse = SourceFile::parse(code);
// eprintln!("{:#?}", parse.syntax_node());
@@ -38,7 +38,7 @@ fn benchmark_parser() {
let tree = {
let _b = bench("parsing");
let p = SourceFile::parse(&data);
- assert!(p.errors.is_empty());
+ assert!(p.errors.is_none());
assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
p.tree()
};
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
index dc6c96343..c2e921e4b 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/sourcegen_ast.rs
@@ -622,7 +622,7 @@ fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
}
fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
- if lower_seperated_list(acc, grammar, label, rule) {
+ if lower_separated_list(acc, grammar, label, rule) {
return;
}
@@ -688,7 +688,7 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
}
// (T (',' T)* ','?)
-fn lower_seperated_list(
+fn lower_separated_list(
acc: &mut Vec<Field>,
grammar: &Grammar,
label: Option<&String>,
diff --git a/src/tools/rust-analyzer/crates/syntax/src/token_text.rs b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
index 09c080c0c..e69deb49c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/token_text.rs
@@ -13,7 +13,7 @@ pub(crate) enum Repr<'a> {
}
impl<'a> TokenText<'a> {
- pub(crate) fn borrowed(text: &'a str) -> Self {
+ pub fn borrowed(text: &'a str) -> Self {
TokenText(Repr::Borrowed(text))
}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/utils.rs b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
index 25f34ea9d..a38f8b2b5 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
@@ -1,48 +1,8 @@
//! A set of utils methods to reuse on other abstraction levels
-use itertools::Itertools;
-
-use crate::{ast, match_ast, AstNode, SyntaxKind};
-
-pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
- path.syntax()
- .children()
- .filter_map(|node| {
- match_ast! {
- match node {
- ast::PathSegment(it) => {
- Some(it.name_ref()?.to_string())
- },
- ast::Path(it) => {
- Some(path_to_string_stripping_turbo_fish(&it))
- },
- _ => None,
- }
- }
- })
- .join("::")
-}
+use crate::SyntaxKind;
pub fn is_raw_identifier(name: &str) -> bool {
let is_keyword = SyntaxKind::from_keyword(name).is_some();
is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
}
-
-#[cfg(test)]
-mod tests {
- use super::path_to_string_stripping_turbo_fish;
- use crate::ast::make;
-
- #[test]
- fn turbofishes_are_stripped() {
- assert_eq!("Vec", path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>")),);
- assert_eq!(
- "Vec::new",
- path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::<i32>::new")),
- );
- assert_eq!(
- "Vec::new",
- path_to_string_stripping_turbo_fish(&make::path_from_text("Vec::new()")),
- );
- }
-}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/validation.rs b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
index e0ec6a242..2b1bbac08 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/validation.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/validation.rs
@@ -5,7 +5,7 @@
mod block;
use rowan::Direction;
-use rustc_lexer::unescape::{self, unescape_literal, Mode};
+use rustc_dependencies::lexer::unescape::{self, unescape_literal, Mode};
use crate::{
algo,
diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
index 2b5b6f495..438b599ff 100644
--- a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
# Avoid adding deps here, this crate is widely used in tests it should compile fast!
-dissimilar = "1.0.4"
+dissimilar = "1.0.7"
text-size.workspace = true
rustc-hash = "1.1.0"
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
index fd3e68e2d..e48b27313 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs
@@ -6,7 +6,7 @@
//! * Extracting markup (mainly, `$0` markers) out of fixture strings.
//! * marks (see the eponymous module).
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod assert_linear;
pub mod bench_fixture;
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index 573f56b00..f766747d7 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -9,12 +9,15 @@
//!
//! Available flags:
//! add:
+//! asm:
+//! assert:
//! as_ref: sized
//! bool_impl: option, fn
//! builtin_impls:
//! cell: copy, drop
//! clone: sized
//! coerce_unsized: unsize
+//! concat:
//! copy: clone
//! default: sized
//! deref_mut: deref
@@ -44,7 +47,7 @@
//! panic: fmt
//! phantom_data:
//! pin:
-//! pointee:
+//! pointee: copy, send, sync, ord, hash, unpin
//! range:
//! result:
//! send: sized
@@ -54,6 +57,7 @@
//! sync: sized
//! transmute:
//! try: infallible
+//! unpin: sized
//! unsize: sized
#![rustc_coherence_is_core]
@@ -89,6 +93,11 @@ pub mod marker {
pub trait Unsize<T: ?Sized> {}
// endregion:unsize
+ // region:unpin
+ #[lang = "unpin"]
+ pub auto trait Unpin {}
+ // endregion:unpin
+
// region:copy
#[lang = "copy"]
pub trait Copy: Clone {}
@@ -387,9 +396,10 @@ pub mod ptr {
// region:pointee
#[lang = "pointee_trait"]
+ #[rustc_deny_explicit_impl(implement_via_object = false)]
pub trait Pointee {
#[lang = "metadata_type"]
- type Metadata;
+ type Metadata: Copy + Send + Sync + Ord + Hash + Unpin;
}
// endregion:pointee
// region:non_null
@@ -489,7 +499,7 @@ pub mod ops {
I: SliceIndex<[T]>,
{
type Output = I::Output;
- fn index(&self, index: I) -> &I::Output {
+ fn index(&self, _index: I) -> &I::Output {
loop {}
}
}
@@ -497,7 +507,7 @@ pub mod ops {
where
I: SliceIndex<[T]>,
{
- fn index_mut(&mut self, index: I) -> &mut I::Output {
+ fn index_mut(&mut self, _index: I) -> &mut I::Output {
loop {}
}
}
@@ -507,7 +517,7 @@ pub mod ops {
I: SliceIndex<[T]>,
{
type Output = I::Output;
- fn index(&self, index: I) -> &I::Output {
+ fn index(&self, _index: I) -> &I::Output {
loop {}
}
}
@@ -515,7 +525,7 @@ pub mod ops {
where
I: SliceIndex<[T]>,
{
- fn index_mut(&mut self, index: I) -> &mut I::Output {
+ fn index_mut(&mut self, _index: I) -> &mut I::Output {
loop {}
}
}
@@ -863,17 +873,17 @@ pub mod fmt {
pub struct DebugTuple;
pub struct DebugStruct;
impl Formatter<'_> {
- pub fn debug_tuple(&mut self, name: &str) -> DebugTuple {
+ pub fn debug_tuple(&mut self, _name: &str) -> DebugTuple {
DebugTuple
}
- pub fn debug_struct(&mut self, name: &str) -> DebugStruct {
+ pub fn debug_struct(&mut self, _name: &str) -> DebugStruct {
DebugStruct
}
}
impl DebugTuple {
- pub fn field(&mut self, value: &dyn Debug) -> &mut Self {
+ pub fn field(&mut self, _value: &dyn Debug) -> &mut Self {
self
}
@@ -883,7 +893,7 @@ pub mod fmt {
}
impl DebugStruct {
- pub fn field(&mut self, name: &str, value: &dyn Debug) -> &mut Self {
+ pub fn field(&mut self, _name: &str, _value: &dyn Debug) -> &mut Self {
self
}
@@ -996,7 +1006,7 @@ pub mod fmt {
($($t:ty)*) => {
$(
impl const Debug for $t {
- fn fmt(&self, f: &mut Formatter<'_>) -> Result {
+ fn fmt(&self, _f: &mut Formatter<'_>) -> Result {
Ok(())
}
}
@@ -1012,7 +1022,7 @@ pub mod fmt {
}
impl<T: Debug> Debug for [T] {
- fn fmt(&self, f: &mut Formatter<'_>) -> Result {
+ fn fmt(&self, _f: &mut Formatter<'_>) -> Result {
Ok(())
}
}
@@ -1047,6 +1057,10 @@ pub mod option {
Some(T),
}
+ // region:copy
+ impl<T: Copy> Copy for Option<T> {}
+ // endregion:copy
+
impl<T> Option<T> {
pub const fn unwrap(self) -> T {
match self {
@@ -1062,7 +1076,7 @@ pub mod option {
}
}
- pub fn and<U>(self, optb: Option<U>) -> Option<U> {
+ pub fn and<U>(self, _optb: Option<U>) -> Option<U> {
loop {}
}
pub fn unwrap_or(self, default: T) -> T {
@@ -1080,25 +1094,25 @@ pub mod option {
}
// endregion:result
// region:fn
- pub fn and_then<U, F>(self, f: F) -> Option<U>
+ pub fn and_then<U, F>(self, _f: F) -> Option<U>
where
F: FnOnce(T) -> Option<U>,
{
loop {}
}
- pub fn unwrap_or_else<F>(self, f: F) -> T
+ pub fn unwrap_or_else<F>(self, _f: F) -> T
where
F: FnOnce() -> T,
{
loop {}
}
- pub fn map_or<U, F>(self, default: U, f: F) -> U
+ pub fn map_or<U, F>(self, _default: U, _f: F) -> U
where
F: FnOnce(T) -> U,
{
loop {}
}
- pub fn map_or_else<U, D, F>(self, default: D, f: F) -> U
+ pub fn map_or_else<U, D, F>(self, _default: D, _f: F) -> U
where
D: FnOnce() -> U,
F: FnOnce(T) -> U,
@@ -1129,7 +1143,7 @@ pub mod pin {
pointer: P,
}
impl<P> Pin<P> {
- pub fn new(pointer: P) -> Pin<P> {
+ pub fn new(_pointer: P) -> Pin<P> {
loop {}
}
}
@@ -1226,7 +1240,7 @@ pub mod iter {
mod sources {
mod repeat {
- pub fn repeat<T>(elt: T) -> Repeat<T> {
+ pub fn repeat<T>(_elt: T) -> Repeat<T> {
loop {}
}
@@ -1266,7 +1280,7 @@ pub mod iter {
fn take(self, n: usize) -> crate::iter::Take<Self> {
loop {}
}
- fn filter_map<B, F>(self, f: F) -> crate::iter::FilterMap<Self, F>
+ fn filter_map<B, F>(self, _f: F) -> crate::iter::FilterMap<Self, F>
where
Self: Sized,
F: FnMut(Self::Item) -> Option<B>,
@@ -1337,7 +1351,7 @@ mod panic {
mod panicking {
#[lang = "panic_fmt"]
- pub const fn panic_fmt(fmt: crate::fmt::Arguments<'_>) -> ! {
+ pub const fn panic_fmt(_fmt: crate::fmt::Arguments<'_>) -> ! {
loop {}
}
}
@@ -1346,7 +1360,7 @@ mod panicking {
mod macros {
// region:panic
#[macro_export]
- #[rustc_builtin_macro(std_panic)]
+ #[rustc_builtin_macro(core_panic)]
macro_rules! panic {
($($arg:tt)*) => {
/* compiler built-in */
@@ -1354,6 +1368,26 @@ mod macros {
}
// endregion:panic
+ // region:asm
+ #[macro_export]
+ #[rustc_builtin_macro]
+ macro_rules! asm {
+ ($($arg:tt)*) => {
+ /* compiler built-in */
+ };
+ }
+ // endregion:asm
+
+ // region:assert
+ #[macro_export]
+ #[rustc_builtin_macro]
+ macro_rules! assert {
+ ($($arg:tt)*) => {
+ /* compiler built-in */
+ };
+ }
+ // endregion:assert
+
// region:fmt
#[macro_export]
#[rustc_builtin_macro]
@@ -1370,6 +1404,13 @@ mod macros {
}
#[macro_export]
+ #[rustc_builtin_macro]
+ macro_rules! format_args_nl {
+ ($fmt:expr) => {{ /* compiler built-in */ }};
+ ($fmt:expr, $($args:tt)*) => {{ /* compiler built-in */ }};
+ }
+
+ #[macro_export]
macro_rules! print {
($($arg:tt)*) => {{
$crate::io::_print($crate::format_args!($($arg)*));
@@ -1399,6 +1440,12 @@ mod macros {
($file:expr $(,)?) => {{ /* compiler built-in */ }};
}
// endregion:include
+
+ // region:concat
+ #[rustc_builtin_macro]
+ #[macro_export]
+ macro_rules! concat {}
+ // endregion:concat
}
// region:non_zero
diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
index 76d0ca5cc..4620cc72d 100644
--- a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
@@ -12,5 +12,5 @@ rust-version.workspace = true
doctest = false
[dependencies]
-itertools = "0.10.5"
+itertools.workspace = true
text-size.workspace = true
diff --git a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
index 4705d1818..fb52a50f0 100644
--- a/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/text-edit/src/lib.rs
@@ -4,7 +4,7 @@
//! so `TextEdit` is the ultimate representation of the work done by
//! rust-analyzer.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use itertools::Itertools;
use std::cmp::max;
diff --git a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
index 729f84a81..997f339ed 100644
--- a/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/toolchain/src/lib.rs
@@ -1,6 +1,6 @@
//! Discovery of `cargo` & `rustc` executables.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::{env, iter, path::PathBuf};
diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml
index a28ee5f1c..572224497 100644
--- a/src/tools/rust-analyzer/crates/tt/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml
@@ -13,5 +13,6 @@ doctest = false
[dependencies]
smol_str.workspace = true
+text-size.workspace = true
stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs
index b5a72bec0..481d57540 100644
--- a/src/tools/rust-analyzer/crates/tt/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs
@@ -2,115 +2,98 @@
//! input and output) of macros. It closely mirrors `proc_macro` crate's
//! `TokenTree`.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::fmt;
use stdx::impl_from;
pub use smol_str::SmolStr;
+pub use text_size::{TextRange, TextSize};
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub struct SpanData<Anchor, Ctx> {
+ /// The text range of this span, relative to the anchor.
+ /// We need the anchor for incrementality, as storing absolute ranges will require
+ /// recomputation on every change in a file at all times.
+ pub range: TextRange,
+ pub anchor: Anchor,
+ /// The syntax context of the span.
+ pub ctx: Ctx,
+}
-/// Represents identity of the token.
-///
-/// For hygiene purposes, we need to track which expanded tokens originated from
-/// which source tokens. We do it by assigning an distinct identity to each
-/// source token and making sure that identities are preserved during macro
-/// expansion.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TokenId(pub u32);
+impl<Anchor: SpanAnchor, Ctx: SyntaxContext> Span for SpanData<Anchor, Ctx> {
+ #[allow(deprecated)]
+ const DUMMY: Self = SpanData {
+ range: TextRange::empty(TextSize::new(0)),
+ anchor: Anchor::DUMMY,
+ ctx: Ctx::DUMMY,
+ };
+}
-impl fmt::Debug for TokenId {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.0.fmt(f)
- }
+pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
+ // FIXME: Should not exist. Dummy spans will always be wrong if they leak somewhere. Instead,
+ // the call site or def site spans should be used in relevant places; it's just that we don't
+ // expose those everywhere yet.
+ const DUMMY: Self;
}
-impl TokenId {
- pub const UNSPECIFIED: TokenId = TokenId(!0);
- pub const fn unspecified() -> TokenId {
- Self::UNSPECIFIED
- }
+// FIXME: Should not exist
+pub trait SpanAnchor:
+ std::fmt::Debug + Copy + Sized + Eq + Copy + fmt::Debug + std::hash::Hash
+{
+ #[deprecated(note = "this should not exist")]
+ const DUMMY: Self;
}
-pub mod token_id {
- pub use crate::{DelimiterKind, Spacing, TokenId};
- pub type Span = crate::TokenId;
- pub type Subtree = crate::Subtree<Span>;
- pub type Punct = crate::Punct<Span>;
- pub type Delimiter = crate::Delimiter<Span>;
- pub type Leaf = crate::Leaf<Span>;
- pub type Ident = crate::Ident<Span>;
- pub type Literal = crate::Literal<Span>;
- pub type TokenTree = crate::TokenTree<Span>;
- pub mod buffer {
- pub type TokenBuffer<'a> = crate::buffer::TokenBuffer<'a, super::Span>;
- pub type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>;
- pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>;
- }
+// FIXME: Should not exist
+pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq {
+ #[deprecated(note = "this should not exist")]
+ const DUMMY: Self;
+}
- impl Delimiter {
- pub const UNSPECIFIED: Self = Self {
- open: TokenId::UNSPECIFIED,
- close: TokenId::UNSPECIFIED,
- kind: DelimiterKind::Invisible,
- };
- pub const fn unspecified() -> Self {
- Self::UNSPECIFIED
- }
- }
- impl Subtree {
- pub const fn empty() -> Self {
- Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] }
- }
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum TokenTree<S> {
+ Leaf(Leaf<S>),
+ Subtree(Subtree<S>),
+}
+impl_from!(Leaf<S>, Subtree<S> for TokenTree);
+impl<S: Span> TokenTree<S> {
+ pub const fn empty(span: S) -> Self {
+ Self::Subtree(Subtree {
+ delimiter: Delimiter::invisible_spanned(span),
+ token_trees: vec![],
+ })
}
- impl TokenTree {
- pub const fn empty() -> Self {
- Self::Subtree(Subtree::empty())
+
+ pub fn subtree_or_wrap(self) -> Subtree<S> {
+ match self {
+ TokenTree::Leaf(_) => {
+ Subtree { delimiter: Delimiter::DUMMY_INVISIBLE, token_trees: vec![self] }
+ }
+ TokenTree::Subtree(s) => s,
}
}
-
- impl Subtree {
- pub fn visit_ids(&mut self, f: &mut impl FnMut(TokenId) -> TokenId) {
- self.delimiter.open = f(self.delimiter.open);
- self.delimiter.close = f(self.delimiter.close);
- self.token_trees.iter_mut().for_each(|tt| match tt {
- crate::TokenTree::Leaf(leaf) => match leaf {
- crate::Leaf::Literal(it) => it.span = f(it.span),
- crate::Leaf::Punct(it) => it.span = f(it.span),
- crate::Leaf::Ident(it) => it.span = f(it.span),
- },
- crate::TokenTree::Subtree(s) => s.visit_ids(f),
- })
+ pub fn subtree_or_wrap2(self, span: DelimSpan<S>) -> Subtree<S> {
+ match self {
+ TokenTree::Leaf(_) => Subtree {
+ delimiter: Delimiter::invisible_delim_spanned(span),
+ token_trees: vec![self],
+ },
+ TokenTree::Subtree(s) => s,
}
}
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct SyntaxContext(pub u32);
-
-// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-// pub struct Span {
-// pub id: TokenId,
-// pub ctx: SyntaxContext,
-// }
-// pub type Span = (TokenId, SyntaxContext);
-
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum TokenTree<Span> {
- Leaf(Leaf<Span>),
- Subtree(Subtree<Span>),
+pub enum Leaf<S> {
+ Literal(Literal<S>),
+ Punct(Punct<S>),
+ Ident(Ident<S>),
}
-impl_from!(Leaf<Span>, Subtree<Span> for TokenTree);
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum Leaf<Span> {
- Literal(Literal<Span>),
- Punct(Punct<Span>),
- Ident(Ident<Span>),
-}
-
-impl<Span> Leaf<Span> {
- pub fn span(&self) -> &Span {
+impl<S> Leaf<S> {
+ pub fn span(&self) -> &S {
match self {
Leaf::Literal(it) => &it.span,
Leaf::Punct(it) => &it.span,
@@ -118,21 +101,74 @@ impl<Span> Leaf<Span> {
}
}
}
-impl_from!(Literal<Span>, Punct<Span>, Ident<Span> for Leaf);
+impl_from!(Literal<S>, Punct<S>, Ident<S> for Leaf);
#[derive(Clone, PartialEq, Eq, Hash)]
-pub struct Subtree<Span> {
- pub delimiter: Delimiter<Span>,
- pub token_trees: Vec<TokenTree<Span>>,
+pub struct Subtree<S> {
+ pub delimiter: Delimiter<S>,
+ pub token_trees: Vec<TokenTree<S>>,
+}
+
+impl<S: Span> Subtree<S> {
+ pub const fn empty(span: DelimSpan<S>) -> Self {
+ Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: vec![] }
+ }
+
+ pub fn visit_ids(&mut self, f: &mut impl FnMut(S) -> S) {
+ self.delimiter.open = f(self.delimiter.open);
+ self.delimiter.close = f(self.delimiter.close);
+ self.token_trees.iter_mut().for_each(|tt| match tt {
+ crate::TokenTree::Leaf(leaf) => match leaf {
+ crate::Leaf::Literal(it) => it.span = f(it.span),
+ crate::Leaf::Punct(it) => it.span = f(it.span),
+ crate::Leaf::Ident(it) => it.span = f(it.span),
+ },
+ crate::TokenTree::Subtree(s) => s.visit_ids(f),
+ })
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct DelimSpan<S> {
+ pub open: S,
+ pub close: S,
+}
+
+impl<S: Span> DelimSpan<S> {
+ // FIXME should not exist
+ pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY };
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Delimiter<Span> {
- pub open: Span,
- pub close: Span,
+pub struct Delimiter<S> {
+ pub open: S,
+ pub close: S,
pub kind: DelimiterKind,
}
+impl<S: Span> Delimiter<S> {
+ // FIXME should not exist
+ pub const DUMMY_INVISIBLE: Self =
+ Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible };
+
+ // FIXME should not exist
+ pub const fn dummy_invisible() -> Self {
+ Self::DUMMY_INVISIBLE
+ }
+
+ pub const fn invisible_spanned(span: S) -> Self {
+ Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
+ }
+
+ pub const fn invisible_delim_spanned(span: DelimSpan<S>) -> Self {
+ Delimiter { open: span.open, close: span.close, kind: DelimiterKind::Invisible }
+ }
+
+ pub fn delim_span(&self) -> DelimSpan<S> {
+ DelimSpan { open: self.open, close: self.close }
+ }
+}
+
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DelimiterKind {
Parenthesis,
@@ -142,16 +178,16 @@ pub enum DelimiterKind {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Literal<Span> {
+pub struct Literal<S> {
pub text: SmolStr,
- pub span: Span,
+ pub span: S,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct Punct<Span> {
+pub struct Punct<S> {
pub char: char,
pub spacing: Spacing,
- pub span: Span,
+ pub span: S,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -162,9 +198,9 @@ pub enum Spacing {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
/// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier.
-pub struct Ident<Span> {
+pub struct Ident<S> {
pub text: SmolStr,
- pub span: Span,
+ pub span: S,
}
impl<S> Ident<S> {
@@ -173,9 +209,9 @@ impl<S> Ident<S> {
}
}
-fn print_debug_subtree<Span: fmt::Debug>(
+fn print_debug_subtree<S: fmt::Debug>(
f: &mut fmt::Formatter<'_>,
- subtree: &Subtree<Span>,
+ subtree: &Subtree<S>,
level: usize,
) -> fmt::Result {
let align = " ".repeat(level);
@@ -203,9 +239,9 @@ fn print_debug_subtree<Span: fmt::Debug>(
Ok(())
}
-fn print_debug_token<Span: fmt::Debug>(
+fn print_debug_token<S: fmt::Debug>(
f: &mut fmt::Formatter<'_>,
- tkn: &TokenTree<Span>,
+ tkn: &TokenTree<S>,
level: usize,
) -> fmt::Result {
let align = " ".repeat(level);
@@ -231,13 +267,13 @@ fn print_debug_token<Span: fmt::Debug>(
Ok(())
}
-impl<Span: fmt::Debug> fmt::Debug for Subtree<Span> {
+impl<S: fmt::Debug> fmt::Debug for Subtree<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
print_debug_subtree(f, self, 0)
}
}
-impl<Span> fmt::Display for TokenTree<Span> {
+impl<S> fmt::Display for TokenTree<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
TokenTree::Leaf(it) => fmt::Display::fmt(it, f),
@@ -246,7 +282,7 @@ impl<Span> fmt::Display for TokenTree<Span> {
}
}
-impl<Span> fmt::Display for Subtree<Span> {
+impl<S> fmt::Display for Subtree<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let (l, r) = match self.delimiter.kind {
DelimiterKind::Parenthesis => ("(", ")"),
@@ -274,7 +310,7 @@ impl<Span> fmt::Display for Subtree<Span> {
}
}
-impl<Span> fmt::Display for Leaf<Span> {
+impl<S> fmt::Display for Leaf<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Leaf::Ident(it) => fmt::Display::fmt(it, f),
@@ -284,25 +320,25 @@ impl<Span> fmt::Display for Leaf<Span> {
}
}
-impl<Span> fmt::Display for Ident<Span> {
+impl<S> fmt::Display for Ident<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.text, f)
}
}
-impl<Span> fmt::Display for Literal<Span> {
+impl<S> fmt::Display for Literal<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.text, f)
}
}
-impl<Span> fmt::Display for Punct<Span> {
+impl<S> fmt::Display for Punct<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.char, f)
}
}
-impl<Span> Subtree<Span> {
+impl<S> Subtree<S> {
/// Count the number of tokens recursively
pub fn count(&self) -> usize {
let children_count = self
@@ -318,7 +354,7 @@ impl<Span> Subtree<Span> {
}
}
-impl<Span> Subtree<Span> {
+impl<S> Subtree<S> {
/// A simple line string used for debugging
pub fn as_debug_string(&self) -> String {
let delim = match self.delimiter.kind {
@@ -366,8 +402,8 @@ impl<Span> Subtree<Span> {
pub mod buffer;
-pub fn pretty<Span>(tkns: &[TokenTree<Span>]) -> String {
- fn tokentree_to_text<Span>(tkn: &TokenTree<Span>) -> String {
+pub fn pretty<S>(tkns: &[TokenTree<S>]) -> String {
+ fn tokentree_to_text<S>(tkn: &TokenTree<S>) -> String {
match tkn {
TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(),
TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(),
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
index 11055f028..fe6cb0a2c 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -12,10 +12,9 @@ rust-version.workspace = true
doctest = false
[dependencies]
-tracing = "0.1.35"
+tracing.workspace = true
walkdir = "2.3.2"
crossbeam-channel = "0.5.5"
-# We demand 5.1.0 as any higher version pulls in a new windows-sys dupe
notify = "6.1.1"
stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
index abfc51dfe..030650437 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs
@@ -7,7 +7,7 @@
//! Hopefully, one day a reliable file watching/walking crate appears on
//! crates.io, and we can reduce this to trivial glue code.
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
use std::fs;
diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
index c35785cf9..11409f2eb 100644
--- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
fst = "0.4.7"
-indexmap = "2.0.0"
+indexmap.workspace = true
nohash-hasher.workspace = true
paths.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
index 06004adad..ef5b10ee9 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
@@ -38,7 +38,7 @@
//! [`Handle`]: loader::Handle
//! [`Entries`]: loader::Entry
-#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(rust_2018_idioms, unused_lifetimes)]
mod anchored_path;
pub mod file_set;
@@ -60,7 +60,26 @@ pub use paths::{AbsPath, AbsPathBuf};
///
/// Most functions in rust-analyzer use this when they need to refer to a file.
#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
-pub struct FileId(pub u32);
+pub struct FileId(u32);
+// pub struct FileId(NonMaxU32);
+
+impl FileId {
+ /// Think twice about using this outside of tests. If this ends up in the wrong place, it will cause panics!
+ // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
+ pub const BOGUS: FileId = FileId(0xe4e4e);
+ pub const MAX_FILE_ID: u32 = 0x7fff_ffff;
+
+ #[inline]
+ pub const fn from_raw(raw: u32) -> FileId {
+ assert!(raw <= Self::MAX_FILE_ID);
+ FileId(raw)
+ }
+
+ #[inline]
+ pub fn index(self) -> u32 {
+ self.0
+ }
+}
/// safe because `FileId` is a newtype of `u32`
impl nohash_hasher::IsEnabled for FileId {}