author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-05-30 03:59:35 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-05-30 03:59:35 +0000
commit     d1b2d29528b7794b41e66fc2136e395a02f8529b (patch)
tree       a4a17504b260206dec3cf55b2dca82929a348ac2 /src/tools/rust-analyzer
parent     Releasing progress-linux version 1.72.1+dfsg1-1~progress7.99u1. (diff)
download   rustc-d1b2d29528b7794b41e66fc2136e395a02f8529b.tar.xz
           rustc-d1b2d29528b7794b41e66fc2136e395a02f8529b.zip
Merging upstream version 1.73.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer')
 src/tools/rust-analyzer/.editorconfig | 13
 src/tools/rust-analyzer/Cargo.lock | 472
 src/tools/rust-analyzer/Cargo.toml | 24
 src/tools/rust-analyzer/crates/base-db/Cargo.toml | 2
 src/tools/rust-analyzer/crates/base-db/src/fixture.rs | 114
 src/tools/rust-analyzer/crates/base-db/src/input.rs | 6
 src/tools/rust-analyzer/crates/cfg/Cargo.toml | 6
 src/tools/rust-analyzer/crates/cfg/src/lib.rs | 4
 src/tools/rust-analyzer/crates/flycheck/Cargo.toml | 4
 src/tools/rust-analyzer/crates/hir-def/Cargo.toml | 8
 src/tools/rust-analyzer/crates/hir-def/src/attr.rs | 42
 src/tools/rust-analyzer/crates/hir-def/src/body.rs | 4
 src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs | 71
 src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs | 14
 src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs | 5
 src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs | 12
 src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs | 77
 src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs | 23
 src/tools/rust-analyzer/crates/hir-def/src/data.rs | 47
 src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs | 131
 src/tools/rust-analyzer/crates/hir-def/src/db.rs | 26
 src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs | 8
 src/tools/rust-analyzer/crates/hir-def/src/expander.rs | 87
 src/tools/rust-analyzer/crates/hir-def/src/find_path.rs | 2
 src/tools/rust-analyzer/crates/hir-def/src/generics.rs | 16
 src/tools/rust-analyzer/crates/hir-def/src/hir.rs | 11
 src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs | 4
 src/tools/rust-analyzer/crates/hir-def/src/import_map.rs | 645
 src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs | 25
 src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs | 20
 src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs | 6
 src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs | 8
 src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs | 8
 src/tools/rust-analyzer/crates/hir-def/src/lib.rs | 171
 src/tools/rust-analyzer/crates/hir-def/src/lower.rs | 8
 src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs | 76
 src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs | 22
 src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs | 91
 src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs | 53
 src/tools/rust-analyzer/crates/hir-def/src/nameres.rs | 21
 src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs | 120
 src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs | 15
 src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs | 124
 src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs | 18
 src/tools/rust-analyzer/crates/hir-def/src/path.rs | 7
 src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs | 4
 src/tools/rust-analyzer/crates/hir-def/src/pretty.rs | 4
 src/tools/rust-analyzer/crates/hir-def/src/resolver.rs | 68
 src/tools/rust-analyzer/crates/hir-expand/Cargo.toml | 6
 src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs | 79
 src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs | 4
 src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs | 196
 src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs | 14
 src/tools/rust-analyzer/crates/hir-expand/src/db.rs | 478
 src/tools/rust-analyzer/crates/hir-expand/src/eager.rs | 237
 src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs | 16
 src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs | 50
 src/tools/rust-analyzer/crates/hir-expand/src/lib.rs | 249
 src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs | 4
 src/tools/rust-analyzer/crates/hir-expand/src/name.rs | 23
 src/tools/rust-analyzer/crates/hir-ty/Cargo.toml | 12
 src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs | 13
 src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs | 68
 src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs | 3
 src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs | 25
 src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs | 453
 src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs | 355
 src/tools/rust-analyzer/crates/hir-ty/src/db.rs | 24
 src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs | 2
 src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs | 77
 src/tools/rust-analyzer/crates/hir-ty/src/display.rs | 115
 src/tools/rust-analyzer/crates/hir-ty/src/infer.rs | 44
 src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs | 46
 src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs | 54
 src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs | 6
 src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs | 47
 src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs | 20
 src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs | 4
 src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs | 21
 src/tools/rust-analyzer/crates/hir-ty/src/layout.rs | 152
 src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs | 28
 src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs | 98
 src/tools/rust-analyzer/crates/hir-ty/src/lib.rs | 11
 src/tools/rust-analyzer/crates/hir-ty/src/lower.rs | 10
 src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs | 54
 src/tools/rust-analyzer/crates/hir-ty/src/mir.rs | 41
 src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs | 19
 src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs | 1636
 src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs | 651
 src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs | 177
 src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs | 118
 src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs | 689
 src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs | 77
 src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs | 7
 src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs | 48
 src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs | 38
 src/tools/rust-analyzer/crates/hir-ty/src/tests.rs | 9
 src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs | 19
 src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs | 12
 src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs | 67
 src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs | 6
 src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs | 26
 src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs | 22
 src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs | 68
 src/tools/rust-analyzer/crates/hir-ty/src/traits.rs | 2
 src/tools/rust-analyzer/crates/hir-ty/src/utils.rs | 14
 src/tools/rust-analyzer/crates/hir/Cargo.toml | 2
 src/tools/rust-analyzer/crates/hir/src/attrs.rs | 41
 src/tools/rust-analyzer/crates/hir/src/db.rs | 11
 src/tools/rust-analyzer/crates/hir/src/diagnostics.rs | 2
 src/tools/rust-analyzer/crates/hir/src/display.rs | 26
 src/tools/rust-analyzer/crates/hir/src/from_id.rs | 5
 src/tools/rust-analyzer/crates/hir/src/has_source.rs | 36
 src/tools/rust-analyzer/crates/hir/src/lib.rs | 250
 src/tools/rust-analyzer/crates/hir/src/semantics.rs | 31
 src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs | 23
 src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs | 2
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs | 184
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs | 158
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs | 4
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs | 75
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs | 125
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs | 2
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs | 4
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs | 164
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs | 38
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs | 2
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs | 113
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs | 38
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs | 166
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs | 1051
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs | 106
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs | 7
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs (renamed from src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs) | 438
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs | 175
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs | 429
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs | 143
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs | 3
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs | 22
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs | 56
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs | 2
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs | 739
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs | 16
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs | 2
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs | 16
 src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs | 31
 src/tools/rust-analyzer/crates/ide-assists/src/lib.rs | 15
 src/tools/rust-analyzer/crates/ide-assists/src/tests.rs | 93
 src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs | 140
 src/tools/rust-analyzer/crates/ide-assists/src/utils.rs | 15
 src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs | 3
 src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs | 2
 src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs | 2
 src/tools/rust-analyzer/crates/ide-completion/src/context.rs | 4
 src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs | 7
 src/tools/rust-analyzer/crates/ide-completion/src/item.rs | 21
 src/tools/rust-analyzer/crates/ide-completion/src/lib.rs | 2
 src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs | 1
 src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs | 23
 src/tools/rust-analyzer/crates/ide-db/Cargo.toml | 2
 src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs | 4
 src/tools/rust-analyzer/crates/ide-db/src/defs.rs | 53
 src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs | 2
 src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs | 16
 src/tools/rust-analyzer/crates/ide-db/src/helpers.rs | 18
 src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs | 8
 src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs | 60
 src/tools/rust-analyzer/crates/ide-db/src/lib.rs | 4
 src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs | 2
 src/tools/rust-analyzer/crates/ide-db/src/rename.rs | 18
 src/tools/rust-analyzer/crates/ide-db/src/search.rs | 18
 src/tools/rust-analyzer/crates/ide-db/src/source_change.rs | 233
 src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml | 1
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs | 40
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs | 7
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs | 10
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs | 208
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs | 5
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs | 5
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs | 21
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs | 5
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs | 4
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs | 6
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs | 181
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs | 21
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs | 41
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs | 4
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs | 5
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs | 10
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs | 14
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs | 21
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs | 4
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs | 9
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs | 29
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs | 4
 src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs | 75
 src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs | 262
 src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs | 18
 src/tools/rust-analyzer/crates/ide-ssr/src/search.rs | 4
 src/tools/rust-analyzer/crates/ide/src/doc_links.rs | 11
 src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs | 35
 src/tools/rust-analyzer/crates/ide/src/goto_definition.rs | 12
 src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs | 88
 src/tools/rust-analyzer/crates/ide/src/highlight_related.rs | 9
 src/tools/rust-analyzer/crates/ide/src/hover.rs | 17
 src/tools/rust-analyzer/crates/ide/src/hover/render.rs | 23
 src/tools/rust-analyzer/crates/ide/src/hover/tests.rs | 89
 src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs | 2
 src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs | 12
 src/tools/rust-analyzer/crates/ide/src/interpret_function.rs | 6
 src/tools/rust-analyzer/crates/ide/src/lib.rs | 13
 src/tools/rust-analyzer/crates/ide/src/moniker.rs | 31
 src/tools/rust-analyzer/crates/ide/src/navigation_target.rs | 37
 src/tools/rust-analyzer/crates/ide/src/references.rs | 5
 src/tools/rust-analyzer/crates/ide/src/rename.rs | 327
 src/tools/rust-analyzer/crates/ide/src/runnables.rs | 61
 src/tools/rust-analyzer/crates/ide/src/ssr.rs | 57
 src/tools/rust-analyzer/crates/ide/src/static_index.rs | 72
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs | 23
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs | 36
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs | 29
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs | 2
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs | 2
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html | 6
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html | 5
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html | 17
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html | 16
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html | 3
 src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs | 27
 src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs | 409
 src/tools/rust-analyzer/crates/intern/Cargo.toml | 2
 src/tools/rust-analyzer/crates/load-cargo/Cargo.toml | 25
 src/tools/rust-analyzer/crates/load-cargo/src/lib.rs | 441
 src/tools/rust-analyzer/crates/mbe/src/benchmark.rs | 11
 src/tools/rust-analyzer/crates/mbe/src/expander.rs | 10
 src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs | 12
 src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs | 42
 src/tools/rust-analyzer/crates/mbe/src/lib.rs | 84
 src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs | 148
 src/tools/rust-analyzer/crates/mbe/src/token_map.rs | 11
 src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs | 2
 src/tools/rust-analyzer/crates/parser/src/grammar.rs | 136
 src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs | 19
 src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs | 3
 src/tools/rust-analyzer/crates/parser/src/grammar/items.rs | 5
 src/tools/rust-analyzer/crates/parser/src/lib.rs | 3
 src/tools/rust-analyzer/crates/parser/src/shortcuts.rs | 5
 src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs | 1
 src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs | 1
 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast | 48
 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs | 1
 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast | 44
 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs | 1
 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast | 63
 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs | 3
 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast | 33
 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs | 1
 src/tools/rust-analyzer/crates/paths/src/lib.rs | 15
 src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml | 2
 src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs | 11
 src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml | 4
 src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs | 21
 src/tools/rust-analyzer/crates/profile/Cargo.toml | 2
 src/tools/rust-analyzer/crates/profile/src/tree.rs | 2
 src/tools/rust-analyzer/crates/project-model/Cargo.toml | 2
 src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs | 17
 src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs | 27
 src/tools/rust-analyzer/crates/project-model/src/lib.rs | 23
 src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs | 8
 src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs | 6
 src/tools/rust-analyzer/crates/project-model/src/sysroot.rs | 22
 src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs | 3
 src/tools/rust-analyzer/crates/project-model/src/workspace.rs | 92
 src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml | 13
 src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs | 94
 src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs | 64
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs | 18
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs | 166
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs | 12
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs | 32
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs | 205
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs | 59
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs | 89
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs | 72
 src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs | 15
 src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs | 1
 src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs | 30
 src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs | 27
 src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs | 12
 src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs | 28
 src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs | 260
 src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs | 2
 src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs | 13
 src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs | 27
 src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs | 18
 src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs | 301
 src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs | 1
 src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs | 621
 src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs | 26
 src/tools/rust-analyzer/crates/sourcegen/src/lib.rs | 4
 src/tools/rust-analyzer/crates/stdx/Cargo.toml | 2
 src/tools/rust-analyzer/crates/syntax/Cargo.toml | 2
 src/tools/rust-analyzer/crates/syntax/rust.ungram | 6
 src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs | 85
 src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs | 30
 src/tools/rust-analyzer/crates/syntax/src/ast/make.rs | 93
 src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs | 61
 src/tools/rust-analyzer/crates/syntax/src/lib.rs | 103
 src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs | 1
 src/tools/rust-analyzer/crates/test-utils/src/fixture.rs | 95
 src/tools/rust-analyzer/crates/test-utils/src/minicore.rs | 69
 src/tools/rust-analyzer/crates/tt/src/buffer.rs | 6
 src/tools/rust-analyzer/crates/tt/src/lib.rs | 18
 src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml | 3
 src/tools/rust-analyzer/crates/vfs/Cargo.toml | 2
 src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs | 8
 src/tools/rust-analyzer/docs/dev/lsp-extensions.md | 47
 src/tools/rust-analyzer/docs/dev/style.md | 13
 src/tools/rust-analyzer/docs/user/manual.adoc | 93
 src/tools/rust-analyzer/lib/README.md | 9
 src/tools/rust-analyzer/lib/la-arena/Cargo.toml | 2
 src/tools/rust-analyzer/lib/line-index/Cargo.toml | 6
 src/tools/rust-analyzer/lib/line-index/src/lib.rs | 218
 src/tools/rust-analyzer/lib/line-index/src/tests.rs | 129
 src/tools/rust-analyzer/lib/line-index/tests/it.rs | 62
 src/tools/rust-analyzer/lib/lsp-server/Cargo.toml | 6
 src/tools/rust-analyzer/lib/lsp-server/src/msg.rs | 2
 src/tools/rust-analyzer/triagebot.toml | 4
 src/tools/rust-analyzer/xtask/Cargo.toml | 2
 src/tools/rust-analyzer/xtask/src/flags.rs | 29
 src/tools/rust-analyzer/xtask/src/install.rs | 10
 src/tools/rust-analyzer/xtask/src/metrics.rs | 76
 src/tools/rust-analyzer/xtask/src/publish.rs | 11
 357 files changed, 16541 insertions(+), 6113 deletions(-)
diff --git a/src/tools/rust-analyzer/.editorconfig b/src/tools/rust-analyzer/.editorconfig
index 314f79d3f..f00ade5fd 100644
--- a/src/tools/rust-analyzer/.editorconfig
+++ b/src/tools/rust-analyzer/.editorconfig
@@ -7,13 +7,10 @@ trim_trailing_whitespace = true
end_of_line = lf
insert_final_newline = true
indent_style = space
-
-[*.{rs,toml}]
indent_size = 4
-[*.ts]
-indent_size = 4
-[*.js]
-indent_size = 4
-[*.json]
-indent_size = 4
+[*.md]
+indent_size = 2
+
+[*.{yml, yaml}]
+indent_size = 2
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index 13cb25f7b..a2b263cf2 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -28,9 +28,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.70"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"
+checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "anymap"
@@ -46,9 +46,9 @@ checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e"
[[package]]
name = "arrayvec"
-version = "0.7.2"
+version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
+checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
[[package]]
name = "atty"
@@ -77,8 +77,8 @@ dependencies = [
"cc",
"cfg-if",
"libc",
- "miniz_oxide",
- "object",
+ "miniz_oxide 0.6.2",
+ "object 0.30.4",
"rustc-demangle",
]
@@ -87,7 +87,7 @@ name = "base-db"
version = "0.0.0"
dependencies = [
"cfg",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"profile",
"rustc-hash",
"salsa",
@@ -107,9 +107,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.1.0"
+version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c70beb79cbb5ce9c4f8e20849978f34225931f665bb49efa6982875a4d5facb3"
+checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded"
[[package]]
name = "byteorder"
@@ -177,21 +177,21 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
-version = "0.91.0"
+version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c59178fded594fe78c47b841520e5a4399d00fe15fffee19b945958a878cd02d"
+checksum = "ff5053a8a42dbff5279a82423946fc56dc1253b76cf211b2b3c14b3aad4e1281"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.15",
+ "syn 2.0.18",
"synstructure",
]
[[package]]
name = "chalk-ir"
-version = "0.91.0"
+version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8824be92876823b828d551bb792f79eb1f69c69d1948abf69fccbf84e448e57b"
+checksum = "8a56de2146a8ed0fcd54f4bd50db852f1de4eac9e1efe568494f106c21b77d2a"
dependencies = [
"bitflags 1.3.2",
"chalk-derive",
@@ -200,9 +200,9 @@ dependencies = [
[[package]]
name = "chalk-recursive"
-version = "0.91.0"
+version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e110d1260809c238072d1c8ef84060e39983e8ea9d4c6f74b19b0ebbf8904dc"
+checksum = "5cc09e6e9531f3544989ef89b189e80fbc7ad9e2f73f1c5e03ddc9ffb0527463"
dependencies = [
"chalk-derive",
"chalk-ir",
@@ -213,14 +213,14 @@ dependencies = [
[[package]]
name = "chalk-solve"
-version = "0.91.0"
+version = "0.92.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "12200b19abf4b0633095f7bd099f3ef609d314754b6adb358c68cc04d10589e5"
+checksum = "b392e02b4c81ec76d3748da839fc70a5539b83d27c9030668463d34d5110b860"
dependencies = [
"chalk-derive",
"chalk-ir",
"ena",
- "indexmap",
+ "indexmap 1.9.3",
"itertools",
"petgraph",
"rustc-hash",
@@ -286,22 +286,22 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
-version = "0.9.14"
+version = "0.9.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695"
+checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7"
dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
- "memoffset",
+ "memoffset 0.9.0",
"scopeguard",
]
[[package]]
name = "crossbeam-utils"
-version = "0.8.15"
+version = "0.8.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b"
+checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294"
dependencies = [
"cfg-if",
]
@@ -313,7 +313,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
dependencies = [
"cfg-if",
- "hashbrown",
+ "hashbrown 0.12.3",
"lock_api",
"once_cell",
"parking_lot_core 0.9.6",
@@ -321,13 +321,13 @@ dependencies = [
[[package]]
name = "derive_arbitrary"
-version = "1.3.0"
+version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3cdeb9ec472d588e539a818b2dee436825730da08ad0017c4b1a17676bdc8b7"
+checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
@@ -364,6 +364,12 @@ dependencies = [
]
[[package]]
+name = "equivalent"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1"
+
+[[package]]
name = "expect-test"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -382,7 +388,7 @@ dependencies = [
"cfg-if",
"libc",
"redox_syscall",
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -393,12 +399,12 @@ checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
[[package]]
name = "flate2"
-version = "1.0.25"
+version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
+checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743"
dependencies = [
"crc32fast",
- "miniz_oxide",
+ "miniz_oxide 0.7.1",
]
[[package]]
@@ -419,9 +425,9 @@ dependencies = [
[[package]]
name = "form_urlencoded"
-version = "1.1.0"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
dependencies = [
"percent-encoding",
]
@@ -443,9 +449,9 @@ checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
[[package]]
name = "gimli"
-version = "0.27.2"
+version = "0.27.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4"
+checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
[[package]]
name = "hashbrown"
@@ -454,6 +460,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
+name = "hashbrown"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
+
+[[package]]
name = "heck"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -509,7 +521,7 @@ dependencies = [
"anymap",
"arrayvec",
"base-db",
- "bitflags 2.1.0",
+ "bitflags 2.3.2",
"cfg",
"cov-mark",
"dashmap",
@@ -517,14 +529,14 @@ dependencies = [
"either",
"expect-test",
"fst",
- "hashbrown",
+ "hashbrown 0.12.3",
"hir-expand",
"hkalbasi-rustc-ap-rustc_abi",
"hkalbasi-rustc-ap-rustc_index",
- "indexmap",
+ "indexmap 2.0.0",
"intern",
"itertools",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
"once_cell",
@@ -548,10 +560,10 @@ dependencies = [
"cov-mark",
"either",
"expect-test",
- "hashbrown",
+ "hashbrown 0.12.3",
"intern",
"itertools",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
"profile",
@@ -570,7 +582,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
- "bitflags 2.1.0",
+ "bitflags 2.3.2",
"chalk-derive",
"chalk-ir",
"chalk-recursive",
@@ -584,10 +596,11 @@ dependencies = [
"hkalbasi-rustc-ap-rustc_index",
"intern",
"itertools",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"nohash-hasher",
"once_cell",
+ "oorandom",
"profile",
"project-model",
"rustc-hash",
@@ -626,11 +639,11 @@ dependencies = [
[[package]]
name = "home"
-version = "0.5.4"
+version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
+checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb"
dependencies = [
- "winapi",
+ "windows-sys 0.48.0",
]
[[package]]
@@ -715,10 +728,10 @@ dependencies = [
"expect-test",
"fst",
"hir",
- "indexmap",
+ "indexmap 2.0.0",
"itertools",
"limit",
- "line-index",
+ "line-index 0.1.0-pre.1",
"memchr",
"nohash-hasher",
"once_cell",
@@ -748,6 +761,7 @@ dependencies = [
"hir",
"ide-db",
"itertools",
+ "once_cell",
"profile",
"serde_json",
"sourcegen",
@@ -777,9 +791,9 @@ dependencies = [
[[package]]
name = "idna"
-version = "0.3.0"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
+checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
dependencies = [
"unicode-bidi",
"unicode-normalization",
@@ -792,7 +806,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
- "hashbrown",
+ "hashbrown 0.12.3",
+]
+
+[[package]]
+name = "indexmap"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
+dependencies = [
+ "equivalent",
+ "hashbrown 0.14.0",
]
[[package]]
@@ -829,7 +853,7 @@ name = "intern"
version = "0.0.0"
dependencies = [
"dashmap",
- "hashbrown",
+ "hashbrown 0.12.3",
"once_cell",
"rustc-hash",
"triomphe",
@@ -878,7 +902,13 @@ dependencies = [
[[package]]
name = "la-arena"
-version = "0.3.0"
+version = "0.3.1"
+
+[[package]]
+name = "la-arena"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3752f229dcc5a481d60f385fa479ff46818033d881d2d801aa27dffcfb5e8306"
[[package]]
name = "lazy_static"
@@ -888,25 +918,25 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.141"
+version = "0.2.146"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3304a64d199bb964be99741b7a14d26972741915b3649639149b2479bb46f4b5"
+checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
[[package]]
name = "libloading"
-version = "0.7.4"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
+checksum = "d580318f95776505201b28cf98eb1fa5e4be3b689633ba6a3e6cd880ff22d8cb"
dependencies = [
"cfg-if",
- "winapi",
+ "windows-sys 0.48.0",
]
[[package]]
name = "libmimalloc-sys"
-version = "0.1.32"
+version = "0.1.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43a558e3d911bc3c7bfc8c78bc580b404d6e51c1cefbf656e176a94b49b0df40"
+checksum = "f4ac0e912c8ef1b735e92369695618dc5b1819f5a7bf3f167301a3ba1cea515e"
dependencies = [
"cc",
"libc",
@@ -919,16 +949,43 @@ version = "0.0.0"
[[package]]
name = "line-index"
version = "0.1.0-pre.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2cad96769710c1745e11d4f940a8ff36000ade4bbada4285b001cb8aa2f745ce"
dependencies = [
"nohash-hasher",
"text-size",
]
[[package]]
+name = "line-index"
+version = "0.1.0"
+dependencies = [
+ "nohash-hasher",
+ "text-size",
+]
+
+[[package]]
+name = "load-cargo"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "crossbeam-channel",
+ "ide",
+ "ide-db",
+ "itertools",
+ "proc-macro-api",
+ "project-model",
+ "tracing",
+ "tt",
+ "vfs",
+ "vfs-notify",
+]
+
+[[package]]
name = "lock_api"
-version = "0.4.9"
+version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
+checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16"
dependencies = [
"autocfg",
"scopeguard",
@@ -936,16 +993,25 @@ dependencies = [
[[package]]
name = "log"
-version = "0.4.17"
+version = "0.4.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
+
+[[package]]
+name = "lsp-server"
+version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+checksum = "3711e4d6f491dc9edc0f1df80e204f38206775ac92c1241e89b79229a850bc00"
dependencies = [
- "cfg-if",
+ "crossbeam-channel",
+ "log",
+ "serde",
+ "serde_json",
]
[[package]]
name = "lsp-server"
-version = "0.7.0"
+version = "0.7.2"
dependencies = [
"crossbeam-channel",
"log",
@@ -968,15 +1034,6 @@ dependencies = [
]
[[package]]
-name = "matchers"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
-dependencies = [
- "regex-automata",
-]
-
-[[package]]
name = "mbe"
version = "0.0.0"
dependencies = [
@@ -1016,10 +1073,19 @@ dependencies = [
]
[[package]]
+name = "memoffset"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
name = "mimalloc"
-version = "0.1.36"
+version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d88dad3f985ec267a3fcb7a1726f5cb1a7e8cad8b646e70a84f967210df23da"
+checksum = "4e2894987a3459f3ffb755608bd82188f8ed00d0ae077f1edea29c068d639d98"
dependencies = [
"libmimalloc-sys",
]
@@ -1034,6 +1100,15 @@ dependencies = [
]
[[package]]
+name = "miniz_oxide"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
+dependencies = [
+ "adler",
+]
+
+[[package]]
name = "mio"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1042,7 +1117,7 @@ dependencies = [
"libc",
"log",
"wasi",
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -1051,7 +1126,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
dependencies = [
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -1087,7 +1162,7 @@ dependencies = [
"libc",
"mio",
"walkdir",
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -1112,18 +1187,27 @@ dependencies = [
[[package]]
name = "object"
-version = "0.30.3"
+version = "0.30.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439"
+checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "object"
+version = "0.32.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
-version = "1.17.1"
+version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
+checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "oorandom"
@@ -1182,7 +1266,7 @@ dependencies = [
"libc",
"redox_syscall",
"smallvec",
- "windows-sys",
+ "windows-sys 0.42.0",
]
[[package]]
@@ -1209,9 +1293,9 @@ version = "0.0.0"
[[package]]
name = "percent-encoding"
-version = "2.2.0"
+version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "perf-event"
@@ -1239,7 +1323,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
dependencies = [
"fixedbitset",
- "indexmap",
+ "indexmap 1.9.3",
]
[[package]]
@@ -1253,7 +1337,7 @@ name = "proc-macro-api"
version = "0.0.0"
dependencies = [
"memmap2",
- "object",
+ "object 0.32.0",
"paths",
"profile",
"serde",
@@ -1273,7 +1357,7 @@ dependencies = [
"libloading",
"mbe",
"memmap2",
- "object",
+ "object 0.32.0",
"paths",
"proc-macro-api",
"proc-macro-test",
@@ -1317,7 +1401,7 @@ version = "0.0.0"
dependencies = [
"cfg-if",
"countme",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc",
"once_cell",
"perf-event",
@@ -1335,7 +1419,7 @@ dependencies = [
"cfg",
"expect-test",
"itertools",
- "la-arena",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"paths",
"profile",
"rustc-hash",
@@ -1370,9 +1454,9 @@ dependencies = [
[[package]]
name = "pulldown-cmark"
-version = "0.9.2"
+version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63"
+checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998"
dependencies = [
"bitflags 1.3.2",
"memchr",
@@ -1390,9 +1474,9 @@ dependencies = [
[[package]]
name = "quote"
-version = "1.0.26"
+version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
+checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
@@ -1439,38 +1523,14 @@ dependencies = [
]
[[package]]
-name = "regex"
-version = "1.7.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
-dependencies = [
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-dependencies = [
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-syntax"
-version = "0.6.29"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
-
-[[package]]
name = "rowan"
version = "0.15.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf"
dependencies = [
"countme",
- "hashbrown",
- "memoffset",
+ "hashbrown 0.12.3",
+ "memoffset 0.8.0",
"rustc-hash",
"text-size",
]
@@ -1494,7 +1554,8 @@ dependencies = [
"ide-db",
"ide-ssr",
"itertools",
- "lsp-server",
+ "load-cargo",
+ "lsp-server 0.7.1",
"lsp-types",
"mbe",
"mimalloc",
@@ -1512,12 +1573,10 @@ dependencies = [
"scip",
"serde",
"serde_json",
- "serde_repr",
"sourcegen",
"stdx",
"syntax",
"test-utils",
- "thiserror",
"tikv-jemallocator",
"toolchain",
"tracing",
@@ -1525,7 +1584,6 @@ dependencies = [
"tracing-subscriber",
"tracing-tree",
"triomphe",
- "tt",
"vfs",
"vfs-notify",
"winapi",
@@ -1535,9 +1593,9 @@ dependencies = [
[[package]]
name = "rustc-demangle"
-version = "0.1.22"
+version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b"
+checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
name = "rustc-hash"
@@ -1558,7 +1616,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b223dccb46c32753144d0b51290da7230bb4aedcd8379d6b4c9a474c18bf17a"
dependencies = [
"crossbeam-utils",
- "indexmap",
+ "indexmap 1.9.3",
"lock_api",
"log",
"oorandom",
@@ -1641,11 +1699,11 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.96"
+version = "1.0.97"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
+checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a"
dependencies = [
- "indexmap",
+ "indexmap 1.9.3",
"itoa",
"ryu",
"serde",
@@ -1653,13 +1711,13 @@ dependencies = [
[[package]]
name = "serde_repr"
-version = "0.1.11"
+version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "395627de918015623b32e7669714206363a7fc00382bf477e72c1f7533e8eafc"
+checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
@@ -1731,9 +1789,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "2.0.15"
+version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822"
+checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
dependencies = [
"proc-macro2",
"quote",
@@ -1748,7 +1806,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.15",
+ "syn 2.0.18",
"unicode-xid",
]
@@ -1759,7 +1817,7 @@ dependencies = [
"cov-mark",
"either",
"expect-test",
- "indexmap",
+ "indexmap 2.0.0",
"itertools",
"once_cell",
"parser",
@@ -1806,22 +1864,22 @@ checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
[[package]]
name = "thiserror"
-version = "1.0.39"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c"
+checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
-version = "1.0.39"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e"
+checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
@@ -1867,9 +1925,9 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.20"
+version = "0.3.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890"
+checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd"
dependencies = [
"serde",
"time-core",
@@ -1877,9 +1935,9 @@ dependencies = [
[[package]]
name = "time-core"
-version = "0.1.0"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
+checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb"
[[package]]
name = "tinyvec"
@@ -1917,20 +1975,20 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.23"
+version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
+checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [
"proc-macro2",
"quote",
- "syn 1.0.109",
+ "syn 2.0.18",
]
[[package]]
name = "tracing-core"
-version = "0.1.30"
+version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
+checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
dependencies = [
"once_cell",
"valuable",
@@ -1949,25 +2007,21 @@ dependencies = [
[[package]]
name = "tracing-subscriber"
-version = "0.3.16"
+version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70"
+checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77"
dependencies = [
- "matchers",
- "once_cell",
- "regex",
"sharded-slab",
"thread_local",
- "tracing",
"tracing-core",
"tracing-log",
]
[[package]]
name = "tracing-tree"
-version = "0.2.2"
+version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "758e983ab7c54fee18403994507e7f212b9005e957ce7984996fac8d11facedb"
+checksum = "4f9742d8df709837409dbb22aa25dd7769c260406f20ff48a2320b80a4a6aed0"
dependencies = [
"atty",
"nu-ansi-term",
@@ -2060,9 +2114,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
[[package]]
name = "unicode-ident"
-version = "1.0.8"
+version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
+checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"
[[package]]
name = "unicode-normalization"
@@ -2087,9 +2141,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "url"
-version = "2.3.1"
+version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
+checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb"
dependencies = [
"form_urlencoded",
"idna",
@@ -2114,7 +2168,7 @@ name = "vfs"
version = "0.0.0"
dependencies = [
"fst",
- "indexmap",
+ "indexmap 2.0.0",
"nohash-hasher",
"paths",
"rustc-hash",
@@ -2187,13 +2241,37 @@ version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
+ "windows_aarch64_gnullvm 0.42.2",
+ "windows_aarch64_msvc 0.42.2",
+ "windows_i686_gnu 0.42.2",
+ "windows_i686_msvc 0.42.2",
+ "windows_x86_64_gnu 0.42.2",
+ "windows_x86_64_gnullvm 0.42.2",
+ "windows_x86_64_msvc 0.42.2",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5"
+dependencies = [
+ "windows_aarch64_gnullvm 0.48.0",
+ "windows_aarch64_msvc 0.48.0",
+ "windows_i686_gnu 0.48.0",
+ "windows_i686_msvc 0.48.0",
+ "windows_x86_64_gnu 0.48.0",
+ "windows_x86_64_gnullvm 0.48.0",
+ "windows_x86_64_msvc 0.48.0",
]
[[package]]
@@ -2203,42 +2281,84 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+
+[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
[[package]]
+name = "windows_aarch64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+
+[[package]]
name = "windows_i686_gnu"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
[[package]]
+name = "windows_i686_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+
+[[package]]
name = "windows_i686_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
[[package]]
+name = "windows_i686_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+
+[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
[[package]]
+name = "windows_x86_64_gnu"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+
+[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+
+[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
[[package]]
+name = "windows_x86_64_msvc"
+version = "0.48.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+
+[[package]]
name = "write-json"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2289,9 +2409,9 @@ dependencies = [
[[package]]
name = "zip"
-version = "0.6.4"
+version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef"
+checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
dependencies = [
"byteorder",
"crc32fast",
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 3050cf764..f6a50bfa6 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-test/imp"]
resolver = "2"
[workspace.package]
-rust-version = "1.66"
+rust-version = "1.70"
edition = "2021"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]
@@ -35,6 +35,10 @@ debug = 0
# chalk-ir = { path = "../chalk/chalk-ir" }
# chalk-recursive = { path = "../chalk/chalk-recursive" }
# chalk-derive = { path = "../chalk/chalk-derive" }
+# line-index = { path = "lib/line-index" }
+# la-arena = { path = "lib/la-arena" }
+# lsp-server = { path = "lib/lsp-server" }
+
# ungrammar = { path = "../ungrammar" }
@@ -57,13 +61,13 @@ ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" }
ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" }
intern = { path = "./crates/intern", version = "0.0.0" }
limit = { path = "./crates/limit", version = "0.0.0" }
+load-cargo = { path = "./crates/load-cargo", version = "0.0.0" }
mbe = { path = "./crates/mbe", version = "0.0.0" }
parser = { path = "./crates/parser", version = "0.0.0" }
paths = { path = "./crates/paths", version = "0.0.0" }
proc-macro-api = { path = "./crates/proc-macro-api", version = "0.0.0" }
proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" }
proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" }
-proc-macro-test = { path = "./crates/proc-macro-test", version = "0.0.0" }
profile = { path = "./crates/profile", version = "0.0.0" }
project-model = { path = "./crates/project-model", version = "0.0.0" }
sourcegen = { path = "./crates/sourcegen", version = "0.0.0" }
@@ -75,7 +79,14 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" }
tt = { path = "./crates/tt", version = "0.0.0" }
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
-line-index = { version = "0.1.0-pre.1", path = "./lib/line-index" }
+
+# local crates that aren't published to crates.io. These should not have versions.
+proc-macro-test = { path = "./crates/proc-macro-test" }
+
+# In-tree crates that are published separately and follow semver. See lib/README.md
+line-index = { version = "0.1.0-pre.1" }
+la-arena = { version = "0.3.1" }
+lsp-server = { version = "0.7.1" }
# non-local crates
smallvec = { version = "1.10.0", features = [
@@ -86,9 +97,10 @@ smallvec = { version = "1.10.0", features = [
smol_str = "0.2.0"
nohash-hasher = "0.2.0"
text-size = "1.1.0"
-# the following crates are pinned to prevent us from pulling in syn 2 until all our dependencies have moved
-serde = { version = "=1.0.156", features = ["derive"] }
-serde_json = "1.0.94"
+serde = { version = "1.0.156", features = ["derive"] }
+serde_json = "1.0.96"
triomphe = { version = "0.1.8", default-features = false, features = ["std"] }
+# can't upgrade due to dashmap depending on 0.12.3 currently
+hashbrown = { version = "0.12.3", features = ["inline-more"], default-features = false }
rustc_lexer = { version = "0.1.0", package = "ra-ap-rustc_lexer" }
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
index 6001772c8..171c113a9 100644
--- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -17,7 +17,7 @@ rustc-hash = "1.1.0"
triomphe.workspace = true
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
# local deps
cfg.workspace = true
diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
index d3abc3870..323ee4260 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
@@ -26,7 +26,7 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
let fixture = ChangeFixture::parse(ra_fixture);
let mut db = Self::default();
fixture.change.apply(&mut db);
- assert_eq!(fixture.files.len(), 1);
+ assert_eq!(fixture.files.len(), 1, "Multiple files found in the fixture");
(db, fixture.files[0])
}
@@ -102,6 +102,8 @@ pub struct ChangeFixture {
pub change: Change,
}
+const SOURCE_ROOT_PREFIX: &str = "/";
+
impl ChangeFixture {
pub fn parse(ra_fixture: &str) -> ChangeFixture {
Self::parse_with_proc_macros(ra_fixture, Vec::new())
@@ -131,7 +133,6 @@ impl ChangeFixture {
let mut file_set = FileSet::default();
let mut current_source_root_kind = SourceRootKind::Local;
- let source_root_prefix = "/".to_string();
let mut file_id = FileId(0);
let mut roots = Vec::new();
@@ -151,19 +152,23 @@ impl ChangeFixture {
entry.text.clone()
};
- let meta = FileMeta::from(entry);
- assert!(meta.path.starts_with(&source_root_prefix));
+ let meta = FileMeta::from_fixture(entry, current_source_root_kind);
+ assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX));
if !meta.deps.is_empty() {
assert!(meta.krate.is_some(), "can't specify deps without naming the crate")
}
- if let Some(kind) = &meta.introduce_new_source_root {
- let root = match current_source_root_kind {
+ if let Some(kind) = meta.introduce_new_source_root {
+ assert!(
+ meta.krate.is_some(),
+ "new_source_root meta doesn't make sense without crate meta"
+ );
+ let prev_kind = mem::replace(&mut current_source_root_kind, kind);
+ let prev_root = match prev_kind {
SourceRootKind::Local => SourceRoot::new_local(mem::take(&mut file_set)),
SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)),
};
- roots.push(root);
- current_source_root_kind = *kind;
+ roots.push(prev_root);
}
if let Some((krate, origin, version)) = meta.krate {
@@ -185,7 +190,7 @@ impl ChangeFixture {
Some(toolchain),
);
let prev = crates.insert(crate_name.clone(), crate_id);
- assert!(prev.is_none());
+ assert!(prev.is_none(), "multiple crates with same name: {}", crate_name);
for dep in meta.deps {
let prelude = meta.extern_prelude.contains(&dep);
let dep = CrateName::normalize_dashes(&dep);
@@ -219,7 +224,7 @@ impl ChangeFixture {
false,
CrateOrigin::Local { repo: None, name: None },
default_target_data_layout
- .map(|x| x.into())
+ .map(|it| it.into())
.ok_or_else(|| "target_data_layout unset".into()),
Some(toolchain),
);
@@ -442,51 +447,74 @@ struct FileMeta {
target_data_layout: Option<String>,
}
-fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) {
- if let Some((a, b)) = crate_str.split_once('@') {
- let (version, origin) = match b.split_once(':') {
- Some(("CratesIo", data)) => match data.split_once(',') {
- Some((version, url)) => {
- (version, CrateOrigin::Local { repo: Some(url.to_owned()), name: None })
- }
- _ => panic!("Bad crates.io parameter: {data}"),
- },
- _ => panic!("Bad string for crate origin: {b}"),
- };
- (a.to_owned(), origin, Some(version.to_string()))
- } else {
- let crate_origin = match LangCrateOrigin::from(&*crate_str) {
- LangCrateOrigin::Other => CrateOrigin::Local { repo: None, name: None },
- origin => CrateOrigin::Lang(origin),
- };
- (crate_str, crate_origin, None)
- }
-}
-
-impl From<Fixture> for FileMeta {
- fn from(f: Fixture) -> FileMeta {
+impl FileMeta {
+ fn from_fixture(f: Fixture, current_source_root_kind: SourceRootKind) -> Self {
let mut cfg = CfgOptions::default();
- f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into()));
- f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into()));
+ for (k, v) in f.cfgs {
+ if let Some(v) = v {
+ cfg.insert_key_value(k.into(), v.into());
+ } else {
+ cfg.insert_atom(k.into());
+ }
+ }
+
+ let introduce_new_source_root = f.introduce_new_source_root.map(|kind| match &*kind {
+ "local" => SourceRootKind::Local,
+ "library" => SourceRootKind::Library,
+ invalid => panic!("invalid source root kind '{invalid}'"),
+ });
+ let current_source_root_kind =
+ introduce_new_source_root.unwrap_or(current_source_root_kind);
+
let deps = f.deps;
- FileMeta {
+ Self {
path: f.path,
- krate: f.krate.map(parse_crate),
+ krate: f.krate.map(|it| parse_crate(it, current_source_root_kind, f.library)),
extern_prelude: f.extern_prelude.unwrap_or_else(|| deps.clone()),
deps,
cfg,
- edition: f.edition.as_ref().map_or(Edition::CURRENT, |v| Edition::from_str(v).unwrap()),
+ edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()),
env: f.env.into_iter().collect(),
- introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind {
- "local" => SourceRootKind::Local,
- "library" => SourceRootKind::Library,
- invalid => panic!("invalid source root kind '{invalid}'"),
- }),
+ introduce_new_source_root,
target_data_layout: f.target_data_layout,
}
}
}
+fn parse_crate(
+ crate_str: String,
+ current_source_root_kind: SourceRootKind,
+ explicit_non_workspace_member: bool,
+) -> (String, CrateOrigin, Option<String>) {
+ // syntax:
+ // "my_awesome_crate"
+ // "my_awesome_crate@0.0.1,http://example.com"
+ let (name, repo, version) = if let Some((name, remain)) = crate_str.split_once('@') {
+ let (version, repo) =
+ remain.split_once(',').expect("crate meta: found '@' without version and url");
+ (name.to_owned(), Some(repo.to_owned()), Some(version.to_owned()))
+ } else {
+ (crate_str, None, None)
+ };
+
+ let non_workspace_member = explicit_non_workspace_member
+ || matches!(current_source_root_kind, SourceRootKind::Library);
+
+ let origin = match LangCrateOrigin::from(&*name) {
+ LangCrateOrigin::Other => {
+ let name = name.clone();
+ if non_workspace_member {
+ CrateOrigin::Library { repo, name }
+ } else {
+ CrateOrigin::Local { repo, name: Some(name) }
+ }
+ }
+ origin => CrateOrigin::Lang(origin),
+ };
+
+ (name, origin, version)
+}
+
// Identity mapping
#[derive(Debug)]
struct IdentityProcMacroExpander;
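As a rough, standalone illustration of the crate-meta syntax accepted by `parse_crate` above (`name` or `name@version,repo`), the hypothetical helper below mirrors only the splitting step; the `CrateOrigin` and source-root classification is omitted.

```rust
// Hypothetical, self-contained sketch of the `name@version,repo` splitting done
// by `parse_crate` above; origin classification is intentionally left out.
fn split_crate_meta(crate_str: &str) -> (String, Option<String>, Option<String>) {
    match crate_str.split_once('@') {
        Some((name, rest)) => {
            // After '@', a version and a repo URL are expected, separated by ','.
            let (version, repo) =
                rest.split_once(',').expect("crate meta: found '@' without version and url");
            (name.to_owned(), Some(repo.to_owned()), Some(version.to_owned()))
        }
        None => (crate_str.to_owned(), None, None),
    }
}

fn main() {
    assert_eq!(
        split_crate_meta("my_awesome_crate"),
        ("my_awesome_crate".to_owned(), None, None)
    );
    assert_eq!(
        split_crate_meta("my_awesome_crate@0.0.1,http://example.com"),
        (
            "my_awesome_crate".to_owned(),
            Some("http://example.com".to_owned()),
            Some("0.0.1".to_owned())
        )
    );
}
```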
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index f2e523675..c47799f13 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -138,12 +138,12 @@ impl ops::Deref for CrateName {
}
}
-/// Origin of the crates. It is used in emitting monikers.
+/// Origin of the crates.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum CrateOrigin {
- /// Crates that are from the rustc workspace
+ /// Crates that are from the rustc workspace.
Rustc { name: String },
- /// Crates that are workspace members,
+ /// Crates that are workspace members.
Local { repo: Option<String>, name: Option<String> },
/// Crates that are non member libraries.
Library { repo: Option<String>, name: String },
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
index 0880bc239..ed3808972 100644
--- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -18,13 +18,13 @@ rustc-hash = "1.1.0"
tt.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.4.1"
oorandom = "11.1.3"
# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
# build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
# supports `arbitrary`. This way, we avoid feature unification.
-arbitrary = "1.2.2"
-derive_arbitrary = "1.2.2"
+arbitrary = "1.3.0"
+derive_arbitrary = "1.3.1"
# local deps
mbe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
index 495119d55..183b9b7d2 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
@@ -69,7 +69,7 @@ impl CfgOptions {
}
pub fn get_cfg_keys(&self) -> impl Iterator<Item = &SmolStr> {
- self.enabled.iter().map(|x| match x {
+ self.enabled.iter().map(|it| match it {
CfgAtom::Flag(key) => key,
CfgAtom::KeyValue { key, .. } => key,
})
@@ -79,7 +79,7 @@ impl CfgOptions {
&'a self,
cfg_key: &'a str,
) -> impl Iterator<Item = &'a SmolStr> + 'a {
- self.enabled.iter().filter_map(move |x| match x {
+ self.enabled.iter().filter_map(move |it| match it {
CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value),
_ => None,
})
diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
index 3f6671b1c..e7f7adc78 100644
--- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
@@ -12,9 +12,9 @@ rust-version.workspace = true
doctest = false
[dependencies]
-crossbeam-channel = "0.5.5"
+crossbeam-channel = "0.5.8"
tracing = "0.1.37"
-cargo_metadata = "0.15.0"
+cargo_metadata = "0.15.4"
rustc-hash = "1.1.0"
serde_json.workspace = true
serde.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index 83c705164..30307deb7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -21,14 +21,14 @@ dashmap = { version = "=5.4.0", features = ["raw-api"] }
drop_bomb = "0.1.5"
either = "1.7.0"
fst = { version = "0.4.7", default-features = false }
-hashbrown = { version = "0.12.1", default-features = false }
-indexmap = "1.9.1"
+indexmap = "2.0.0"
itertools = "0.10.5"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
once_cell = "1.17.0"
rustc-hash = "1.1.0"
-smallvec.workspace = true
tracing = "0.1.35"
+smallvec.workspace = true
+hashbrown.workspace = true
triomphe.workspace = true
rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
index bab3bbc23..fae071118 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -137,13 +137,16 @@ impl Attrs {
let cfg_options = &crate_graph[krate].cfg_options;
- let Some(variant) = enum_.variants.clone().filter(|variant| {
- let attrs = item_tree.attrs(db, krate, (*variant).into());
- attrs.is_cfg_enabled(cfg_options)
- })
- .zip(0u32..)
- .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx)))
- .map(|(variant, _idx)| variant)
+ let Some(variant) = enum_
+ .variants
+ .clone()
+ .filter(|variant| {
+ let attrs = item_tree.attrs(db, krate, (*variant).into());
+ attrs.is_cfg_enabled(cfg_options)
+ })
+ .zip(0u32..)
+ .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx)))
+ .map(|(variant, _idx)| variant)
else {
return Arc::new(res);
};
@@ -272,6 +275,25 @@ impl Attrs {
self.by_key("proc_macro_derive").exists()
}
+ pub fn is_test(&self) -> bool {
+ self.iter().any(|it| {
+ it.path()
+ .segments()
+ .iter()
+ .rev()
+ .zip(["core", "prelude", "v1", "test"].iter().rev())
+ .all(|it| it.0.as_str() == Some(it.1))
+ })
+ }
+
+ pub fn is_ignore(&self) -> bool {
+ self.by_key("ignore").exists()
+ }
+
+ pub fn is_bench(&self) -> bool {
+ self.by_key("bench").exists()
+ }
+
pub fn is_unstable(&self) -> bool {
self.by_key("unstable").exists()
}
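To make the suffix comparison in `is_test` above concrete, here is a hedged, standalone model over plain string segments. `is_test_attr_path` is a hypothetical name; the real check runs on interned attribute path segments and is applied to every attribute of the item.

```rust
// Hedged model of the check in `Attrs::is_test` above: compare the attribute
// path and `core::prelude::v1::test` segment by segment from the end; the
// comparison only runs for as many segments as the shorter path has.
// (Real attribute paths always have at least one segment.)
fn is_test_attr_path(path: &[&str]) -> bool {
    let test_path = ["core", "prelude", "v1", "test"];
    path.iter().rev().zip(test_path.iter().rev()).all(|(seg, expected)| seg == expected)
}

fn main() {
    assert!(is_test_attr_path(&["test"]));                          // `#[test]`
    assert!(is_test_attr_path(&["core", "prelude", "v1", "test"])); // fully qualified
    assert!(!is_test_attr_path(&["tokio", "test"]));                // different prelude
}
```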
@@ -282,7 +304,7 @@ use std::slice::Iter as SliceIter;
pub enum DocAtom {
/// eg. `#[doc(hidden)]`
Flag(SmolStr),
- /// eg. `#[doc(alias = "x")]`
+ /// eg. `#[doc(alias = "it")]`
///
/// Note that a key can have multiple values that are all considered "active" at the same time.
/// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`.
@@ -462,6 +484,8 @@ impl AttrsWithOwner {
}
},
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),
+ AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it),
+ AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
};
let attrs = raw_attrs.filter(db.upcast(), def.krate(db));
@@ -546,6 +570,8 @@ impl AttrsWithOwner {
.map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())),
},
AttrDefId::ExternBlockId(id) => any_has_attrs(db, id),
+ AttrDefId::ExternCrateId(id) => any_has_attrs(db, id),
+ AttrDefId::UseId(id) => any_has_attrs(db, id),
};
AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs))
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
index 94dc39b11..f8d492d0e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
@@ -273,10 +273,10 @@ impl Body {
pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
match self.binding_owners.get(&binding) {
- Some(x) => {
+ Some(it) => {
// We assign expression ids in a way that outer closures will receive
// a lower id
- x.into_raw() < relative_to.into_raw()
+ it.into_raw() < relative_to.into_raw()
}
None => true,
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
index b375ec63a..3853a6ab3 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -297,11 +297,11 @@ impl ExprCollector<'_> {
let (result_expr_id, prev_binding_owner) =
this.initialize_binding_owner(syntax_ptr);
let inner_expr = this.collect_block(e);
- let x = this.db.intern_anonymous_const(ConstBlockLoc {
+ let it = this.db.intern_anonymous_const(ConstBlockLoc {
parent: this.owner,
root: inner_expr,
});
- this.body.exprs[result_expr_id] = Expr::Const(x);
+ this.body.exprs[result_expr_id] = Expr::Const(it);
this.current_binding_owner = prev_binding_owner;
result_expr_id
})
@@ -313,21 +313,15 @@ impl ExprCollector<'_> {
let body = self.collect_labelled_block_opt(label, e.loop_body());
self.alloc_expr(Expr::Loop { body, label }, syntax_ptr)
}
- ast::Expr::WhileExpr(e) => {
- let label = e.label().map(|label| self.collect_label(label));
- let body = self.collect_labelled_block_opt(label, e.loop_body());
- let condition = self.collect_expr_opt(e.condition());
-
- self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr)
- }
+ ast::Expr::WhileExpr(e) => self.collect_while_loop(syntax_ptr, e),
ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e),
ast::Expr::CallExpr(e) => {
let is_rustc_box = {
let attrs = e.attrs();
- attrs.filter_map(|x| x.as_simple_atom()).any(|x| x == "rustc_box")
+ attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box")
};
if is_rustc_box {
- let expr = self.collect_expr_opt(e.arg_list().and_then(|x| x.args().next()));
+ let expr = self.collect_expr_opt(e.arg_list().and_then(|it| it.args().next()));
self.alloc_expr(Expr::Box { expr }, syntax_ptr)
} else {
let callee = self.collect_expr_opt(e.expr());
@@ -731,6 +725,32 @@ impl ExprCollector<'_> {
expr_id
}
+ /// Desugar `ast::WhileExpr` from: `[opt_ident]: while <cond> <body>` into:
+ /// ```ignore (pseudo-rust)
+ /// [opt_ident]: loop {
+ /// if <cond> {
+ /// <body>
+ /// }
+ /// else {
+ /// break;
+ /// }
+ /// }
+ /// ```
+ /// FIXME: Rustc wraps the condition in a construct equivalent to `{ let _t = <cond>; _t }`
+ /// to preserve drop semantics. We should probably do the same in future.
+ fn collect_while_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::WhileExpr) -> ExprId {
+ let label = e.label().map(|label| self.collect_label(label));
+ let body = self.collect_labelled_block_opt(label, e.loop_body());
+ let condition = self.collect_expr_opt(e.condition());
+ let break_expr =
+ self.alloc_expr(Expr::Break { expr: None, label: None }, syntax_ptr.clone());
+ let if_expr = self.alloc_expr(
+ Expr::If { condition, then_branch: body, else_branch: Some(break_expr) },
+ syntax_ptr.clone(),
+ );
+ self.alloc_expr(Expr::Loop { body: if_expr, label }, syntax_ptr)
+ }
+
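The desugaring documented on `collect_while_loop` above can be illustrated at the surface level; the pair of hypothetical functions below behave identically, though the real lowering operates on HIR expression ids rather than source text.

```rust
// Hypothetical surface-level illustration of the `while` desugaring above:
// both functions count down from `n` and behave identically.
fn count_down_while(mut n: u32) -> u32 {
    let mut steps = 0;
    while n > 0 {
        n -= 1;
        steps += 1;
    }
    steps
}

fn count_down_desugared(mut n: u32) -> u32 {
    let mut steps = 0;
    // `while <cond> <body>` lowered to `loop { if <cond> { <body> } else { break } }`.
    loop {
        if n > 0 {
            n -= 1;
            steps += 1;
        } else {
            break;
        }
    }
    steps
}

fn main() {
    assert_eq!(count_down_while(5), count_down_desugared(5));
}
```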
/// Desugar `ast::ForExpr` from: `[opt_ident]: for <pat> in <head> <body>` into:
/// ```ignore (pseudo-rust)
/// match IntoIterator::into_iter(<head>) {
@@ -781,7 +801,7 @@ impl ExprCollector<'_> {
pat: self.alloc_pat_desugared(some_pat),
guard: None,
expr: self.with_opt_labeled_rib(label, |this| {
- this.collect_expr_opt(e.loop_body().map(|x| x.into()))
+ this.collect_expr_opt(e.loop_body().map(|it| it.into()))
}),
};
let iter_name = Name::generate_new_name();
@@ -874,10 +894,10 @@ impl ExprCollector<'_> {
}),
guard: None,
expr: {
- let x = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone());
+ let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone());
let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone());
let result = self.alloc_expr(
- Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false },
+ Expr::Call { callee, args: Box::new([it]), is_assignee_expr: false },
syntax_ptr.clone(),
);
self.alloc_expr(
@@ -893,15 +913,14 @@ impl ExprCollector<'_> {
self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr)
}
- fn collect_macro_call<F, T, U>(
+ fn collect_macro_call<T, U>(
&mut self,
mcall: ast::MacroCall,
syntax_ptr: AstPtr<ast::MacroCall>,
record_diagnostics: bool,
- collector: F,
+ collector: impl FnOnce(&mut Self, Option<T>) -> U,
) -> U
where
- F: FnOnce(&mut Self, Option<T>) -> U,
T: ast::AstNode,
{
// File containing the macro call. Expansion errors will be attached here.
@@ -1240,12 +1259,12 @@ impl ExprCollector<'_> {
pats.push(self.collect_pat(first, binding_list));
binding_list.reject_new = true;
for rest in it {
- for (_, x) in binding_list.is_used.iter_mut() {
- *x = false;
+ for (_, it) in binding_list.is_used.iter_mut() {
+ *it = false;
}
pats.push(self.collect_pat(rest, binding_list));
- for (&id, &x) in binding_list.is_used.iter() {
- if !x {
+ for (&id, &is_used) in binding_list.is_used.iter() {
+ if !is_used {
self.body.bindings[id].problems =
Some(BindingProblems::NotBoundAcrossAll);
}
@@ -1352,9 +1371,9 @@ impl ExprCollector<'_> {
// FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference.
ast::Pat::RangePat(p) => {
let mut range_part_lower = |p: Option<ast::Pat>| {
- p.and_then(|x| match &x {
- ast::Pat::LiteralPat(x) => {
- Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(x)?.0)))
+ p.and_then(|it| match &it {
+ ast::Pat::LiteralPat(it) => {
+ Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0)))
}
ast::Pat::IdentPat(p) => {
let name =
@@ -1451,9 +1470,7 @@ impl ExprCollector<'_> {
&self,
lifetime: Option<ast::Lifetime>,
) -> Result<Option<LabelId>, BodyDiagnostic> {
- let Some(lifetime) = lifetime else {
- return Ok(None)
- };
+ let Some(lifetime) = lifetime else { return Ok(None) };
let name = Name::new_lifetime(&lifetime);
for (rib_idx, rib) in self.label_ribs.iter().enumerate().rev() {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
index cd6df0e63..5d71abe37 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
@@ -105,7 +105,7 @@ struct Printer<'a> {
needs_indent: bool,
}
-impl<'a> Write for Printer<'a> {
+impl Write for Printer<'_> {
fn write_str(&mut self, s: &str) -> fmt::Result {
for line in s.split_inclusive('\n') {
if self.needs_indent {
@@ -125,7 +125,7 @@ impl<'a> Write for Printer<'a> {
}
}
-impl<'a> Printer<'a> {
+impl Printer<'_> {
fn indented(&mut self, f: impl FnOnce(&mut Self)) {
self.indent_level += 1;
wln!(self);
@@ -178,14 +178,6 @@ impl<'a> Printer<'a> {
w!(self, "loop ");
self.print_expr(*body);
}
- Expr::While { condition, body, label } => {
- if let Some(lbl) = label {
- w!(self, "{}: ", self.body[*lbl].name.display(self.db));
- }
- w!(self, "while ");
- self.print_expr(*condition);
- self.print_expr(*body);
- }
Expr::Call { callee, args, is_assignee_expr: _ } => {
self.print_expr(*callee);
w!(self, "(");
@@ -634,7 +626,7 @@ impl<'a> Printer<'a> {
match literal {
Literal::String(it) => w!(self, "{:?}", it),
Literal::ByteString(it) => w!(self, "\"{}\"", it.escape_ascii()),
- Literal::CString(it) => w!(self, "\"{}\\0\"", it),
+ Literal::CString(it) => w!(self, "\"{}\\0\"", it.escape_ascii()),
Literal::Char(it) => w!(self, "'{}'", it.escape_debug()),
Literal::Bool(it) => w!(self, "{}", it),
Literal::Int(i, suffix) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
index 69741c445..2a90a09f2 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/scope.rs
@@ -228,11 +228,6 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
scopes.set_scope(expr, scope);
compute_block_scopes(statements, *tail, body, scopes, &mut scope);
}
- Expr::While { condition, body: body_expr, label } => {
- let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
- compute_expr_scopes(*condition, body, scopes, &mut scope);
- compute_expr_scopes(*body_expr, body, scopes, &mut scope);
- }
Expr::Loop { body: body_expr, label } => {
let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
compute_expr_scopes(*body_expr, body, scopes, &mut scope);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
index edee2c7ff..d55820116 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs
@@ -3,12 +3,12 @@ mod block;
use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::Expect;
-use crate::ModuleDefId;
+use crate::{test_db::TestDB, ModuleDefId};
use super::*;
fn lower(ra_fixture: &str) -> Arc<Body> {
- let db = crate::test_db::TestDB::with_files(ra_fixture);
+ let db = TestDB::with_files(ra_fixture);
let krate = db.crate_graph().iter().next().unwrap();
let def_map = db.crate_def_map(krate);
@@ -25,15 +25,15 @@ fn lower(ra_fixture: &str) -> Arc<Body> {
db.body(fn_def.unwrap().into())
}
-fn block_def_map_at(ra_fixture: &str) -> String {
- let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+fn def_map_at(ra_fixture: &str) -> String {
+ let (db, position) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(position);
module.def_map(&db).dump(&db)
}
fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
- let (db, position) = crate::test_db::TestDB::with_position(ra_fixture);
+ let (db, position) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(position);
let actual = module.def_map(&db).dump_block_scopes(&db);
@@ -41,7 +41,7 @@ fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
}
fn check_at(ra_fixture: &str, expect: Expect) {
- let actual = block_def_map_at(ra_fixture);
+ let actual = def_map_at(ra_fixture);
expect.assert_eq(&actual);
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
index 6e77744f2..4e015a7fb 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs
@@ -134,6 +134,47 @@ struct Struct {}
}
#[test]
+fn super_imports_2() {
+ check_at(
+ r#"
+fn outer() {
+ mod m {
+ struct ResolveMe {}
+ fn middle() {
+ mod m2 {
+ fn inner() {
+ use super::ResolveMe;
+ $0
+ }
+ }
+ }
+ }
+}
+"#,
+ expect![[r#"
+ block scope
+ ResolveMe: t
+
+ block scope
+ m2: t
+
+ block scope::m2
+ inner: v
+
+ block scope
+ m: t
+
+ block scope::m
+ ResolveMe: t
+ middle: v
+
+ crate
+ outer: v
+ "#]],
+ );
+}
+
+#[test]
fn nested_module_scoping() {
check_block_scopes_at(
r#"
@@ -156,6 +197,42 @@ fn f() {
}
#[test]
+fn self_imports() {
+ check_at(
+ r#"
+fn f() {
+ mod m {
+ struct ResolveMe {}
+ fn g() {
+ fn h() {
+ use self::ResolveMe;
+ $0
+ }
+ }
+ }
+}
+"#,
+ expect![[r#"
+ block scope
+ ResolveMe: t
+
+ block scope
+ h: v
+
+ block scope
+ m: t
+
+ block scope::m
+ ResolveMe: t
+ g: v
+
+ crate
+ f: v
+ "#]],
+ );
+}
+
+#[test]
fn legacy_macro_items() {
// Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded
// correctly.
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
index bb79e28f2..4cfd318a4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
@@ -14,8 +14,8 @@ use crate::{
item_scope::ItemScope,
nameres::DefMap,
src::{HasChildSource, HasSource},
- AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, FieldId, ImplId, Lookup, MacroId,
- ModuleDefId, ModuleId, TraitId, VariantId,
+ AdtId, AssocItemId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FieldId, ImplId,
+ Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId, VariantId,
};
pub trait ChildBySource {
@@ -91,6 +91,8 @@ impl ChildBySource for ItemScope {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
self.impls().for_each(|imp| add_impl(db, res, file_id, imp));
+ self.extern_crate_decls().for_each(|ext| add_extern_crate(db, res, file_id, ext));
+ self.use_decls().for_each(|ext| add_use(db, res, file_id, ext));
self.unnamed_consts().for_each(|konst| {
let loc = konst.lookup(db);
if loc.id.file_id() == file_id {
@@ -167,6 +169,23 @@ impl ChildBySource for ItemScope {
map[keys::IMPL].insert(loc.source(db).value, imp)
}
}
+ fn add_extern_crate(
+ db: &dyn DefDatabase,
+ map: &mut DynMap,
+ file_id: HirFileId,
+ ext: ExternCrateId,
+ ) {
+ let loc = ext.lookup(db);
+ if loc.id.file_id() == file_id {
+ map[keys::EXTERN_CRATE].insert(loc.source(db).value, ext)
+ }
+ }
+ fn add_use(db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId, ext: UseId) {
+ let loc = ext.lookup(db);
+ if loc.id.file_id() == file_id {
+ map[keys::USE].insert(loc.source(db).value, ext)
+ }
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
index 40e6a4308..91db68058 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
@@ -2,6 +2,7 @@
pub mod adt;
+use base_db::CrateId;
use hir_expand::{
name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefKind,
};
@@ -24,11 +25,12 @@ use crate::{
proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroKind},
DefMap, MacroSubNs,
},
+ path::ImportAlias,
type_ref::{TraitRef, TypeBound, TypeRef},
visibility::RawVisibility,
- AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
- Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
- StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
+ AssocItemId, AstIdWithPath, ConstId, ConstLoc, ExternCrateId, FunctionId, FunctionLoc,
+ HasModule, ImplId, Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId,
+ ProcMacroId, StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -424,6 +426,7 @@ impl MacroRulesData {
Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
}
}
+
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProcMacroData {
pub name: Name,
@@ -461,6 +464,42 @@ impl ProcMacroData {
}
#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ExternCrateDeclData {
+ pub name: Name,
+ pub alias: Option<ImportAlias>,
+ pub visibility: RawVisibility,
+ pub crate_id: Option<CrateId>,
+}
+
+impl ExternCrateDeclData {
+ pub(crate) fn extern_crate_decl_data_query(
+ db: &dyn DefDatabase,
+ extern_crate: ExternCrateId,
+ ) -> Arc<ExternCrateDeclData> {
+ let loc = extern_crate.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let extern_crate = &item_tree[loc.id.value];
+
+ let name = extern_crate.name.clone();
+ let crate_id = if name == hir_expand::name![self] {
+ Some(loc.container.krate())
+ } else {
+ db.crate_def_map(loc.container.krate())
+ .extern_prelude()
+ .find(|&(prelude_name, ..)| *prelude_name == name)
+ .map(|(_, root)| root.krate())
+ };
+
+ Arc::new(Self {
+ name: extern_crate.name.clone(),
+ visibility: item_tree[extern_crate.visibility].clone(),
+ alias: extern_crate.alias.clone(),
+ crate_id,
+ })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ConstData {
/// `None` for `const _: () = ();`
pub name: Option<Name>,
@@ -573,7 +612,7 @@ impl<'a> AssocItemCollector<'a> {
if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
self.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id.local_id,
- InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()),
+ InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).erase()),
attrs.cfg().unwrap(),
self.expander.cfg_options().clone(),
));
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
index 6db5abccc..c8df3f3f9 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
@@ -18,7 +18,6 @@ use triomphe::Arc;
use crate::{
builtin_type::{BuiltinInt, BuiltinUint},
db::DefDatabase,
- expander::CfgExpander,
item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId},
lang_item::LangItem,
lower::LowerCtx,
@@ -29,8 +28,8 @@ use crate::{
tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree},
type_ref::TypeRef,
visibility::RawVisibility,
- EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId,
- VariantId,
+ EnumId, EnumLoc, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId,
+ UnionId, VariantId,
};
/// Note that we use `StructData` for unions as well!
@@ -76,6 +75,7 @@ pub struct EnumData {
pub struct EnumVariantData {
pub name: Name,
pub variant_data: Arc<VariantData>,
+ pub tree_id: la_arena::Idx<crate::item_tree::Variant>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -147,6 +147,7 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
}
"C" => ReprFlags::IS_C,
"transparent" => ReprFlags::IS_TRANSPARENT,
+ "simd" => ReprFlags::IS_SIMD,
repr => {
if let Some(builtin) = BuiltinInt::from_suffix(repr)
.map(Either::Left)
@@ -325,11 +326,12 @@ impl EnumData {
variants.alloc(EnumVariantData {
name: var.name.clone(),
variant_data: Arc::new(var_data),
+ tree_id,
});
} else {
diagnostics.push(DefDiagnostic::unconfigured_code(
loc.container.local_id,
- InFile::new(loc.id.file_id(), var.ast_id.upcast()),
+ InFile::new(loc.id.file_id(), var.ast_id.erase()),
attrs.cfg().unwrap(),
cfg_options.clone(),
))
@@ -367,9 +369,10 @@ impl HasChildSource<LocalEnumVariantId> for EnumId {
&self,
db: &dyn DefDatabase,
) -> InFile<ArenaMap<LocalEnumVariantId, Self::Value>> {
- let src = self.lookup(db).source(db);
+ let loc = &self.lookup(db);
+ let src = loc.source(db);
let mut trace = Trace::new_for_map();
- lower_enum(db, &mut trace, &src, self.lookup(db).container);
+ lower_enum(db, &mut trace, &src, loc);
src.with_value(trace.into_map())
}
}
@@ -378,31 +381,58 @@ fn lower_enum(
db: &dyn DefDatabase,
trace: &mut Trace<EnumVariantData, ast::Variant>,
ast: &InFile<ast::Enum>,
- module_id: ModuleId,
+ loc: &EnumLoc,
) {
- let expander = CfgExpander::new(db, ast.file_id, module_id.krate);
+ let item_tree = loc.id.item_tree(db);
+ let krate = loc.container.krate;
+
+ let item_tree_variants = item_tree[loc.id.value].variants.clone();
+
+ let cfg_options = &db.crate_graph()[krate].cfg_options;
let variants = ast
.value
.variant_list()
.into_iter()
.flat_map(|it| it.variants())
- .filter(|var| expander.is_cfg_enabled(db, var));
- for var in variants {
+ .zip(item_tree_variants)
+ .filter(|&(_, item_tree_id)| {
+ item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options)
+ });
+ for (var, item_tree_id) in variants {
trace.alloc(
|| var.clone(),
|| EnumVariantData {
name: var.name().map_or_else(Name::missing, |it| it.as_name()),
- variant_data: Arc::new(VariantData::new(db, ast.with_value(var.kind()), module_id)),
+ variant_data: Arc::new(VariantData::new(
+ db,
+ ast.with_value(var.kind()),
+ loc.container,
+ &item_tree,
+ item_tree_id,
+ )),
+ tree_id: item_tree_id,
},
);
}
}
impl VariantData {
- fn new(db: &dyn DefDatabase, flavor: InFile<ast::StructKind>, module_id: ModuleId) -> Self {
- let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate);
+ fn new(
+ db: &dyn DefDatabase,
+ flavor: InFile<ast::StructKind>,
+ module_id: ModuleId,
+ item_tree: &ItemTree,
+ variant: la_arena::Idx<crate::item_tree::Variant>,
+ ) -> Self {
let mut trace = Trace::new_for_arena();
- match lower_struct(db, &mut expander, &mut trace, &flavor) {
+ match lower_struct(
+ db,
+ &mut trace,
+ &flavor,
+ module_id.krate,
+ item_tree,
+ &item_tree[variant].fields,
+ ) {
StructKind::Tuple => VariantData::Tuple(trace.into_arena()),
StructKind::Record => VariantData::Record(trace.into_arena()),
StructKind::Unit => VariantData::Unit,
@@ -434,28 +464,43 @@ impl HasChildSource<LocalFieldId> for VariantId {
type Value = Either<ast::TupleField, ast::RecordField>;
fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Self::Value>> {
- let (src, module_id) = match self {
+ let item_tree;
+ let (src, fields, container) = match *self {
VariantId::EnumVariantId(it) => {
// I don't really like the fact that we call into parent source
// here, this might add more queries than necessary.
+ let lookup = it.parent.lookup(db);
+ item_tree = lookup.id.item_tree(db);
let src = it.parent.child_source(db);
- (src.map(|map| map[it.local_id].kind()), it.parent.lookup(db).container)
+ let tree_id = db.enum_data(it.parent).variants[it.local_id].tree_id;
+ let fields = &item_tree[tree_id].fields;
+ (src.map(|map| map[it.local_id].kind()), fields, lookup.container)
}
VariantId::StructId(it) => {
- (it.lookup(db).source(db).map(|it| it.kind()), it.lookup(db).container)
+ let lookup = it.lookup(db);
+ item_tree = lookup.id.item_tree(db);
+ (
+ lookup.source(db).map(|it| it.kind()),
+ &item_tree[lookup.id.value].fields,
+ lookup.container,
+ )
+ }
+ VariantId::UnionId(it) => {
+ let lookup = it.lookup(db);
+ item_tree = lookup.id.item_tree(db);
+ (
+ lookup.source(db).map(|it| {
+ it.record_field_list()
+ .map(ast::StructKind::Record)
+ .unwrap_or(ast::StructKind::Unit)
+ }),
+ &item_tree[lookup.id.value].fields,
+ lookup.container,
+ )
}
- VariantId::UnionId(it) => (
- it.lookup(db).source(db).map(|it| {
- it.record_field_list()
- .map(ast::StructKind::Record)
- .unwrap_or(ast::StructKind::Unit)
- }),
- it.lookup(db).container,
- ),
};
- let mut expander = CfgExpander::new(db, src.file_id, module_id.krate);
let mut trace = Trace::new_for_map();
- lower_struct(db, &mut expander, &mut trace, &src);
+ lower_struct(db, &mut trace, &src, container.krate, &item_tree, fields);
src.with_value(trace.into_map())
}
}
@@ -469,16 +514,19 @@ pub enum StructKind {
fn lower_struct(
db: &dyn DefDatabase,
- expander: &mut CfgExpander,
trace: &mut Trace<FieldData, Either<ast::TupleField, ast::RecordField>>,
ast: &InFile<ast::StructKind>,
+ krate: CrateId,
+ item_tree: &ItemTree,
+ fields: &Fields,
) -> StructKind {
- let ctx = LowerCtx::new(db, &expander.hygiene(), ast.file_id);
+ let ctx = LowerCtx::with_file_id(db, ast.file_id);
- match &ast.value {
- ast::StructKind::Tuple(fl) => {
- for (i, fd) in fl.fields().enumerate() {
- if !expander.is_cfg_enabled(db, &fd) {
+ match (&ast.value, fields) {
+ (ast::StructKind::Tuple(fl), Fields::Tuple(fields)) => {
+ let cfg_options = &db.crate_graph()[krate].cfg_options;
+ for ((i, fd), item_tree_id) in fl.fields().enumerate().zip(fields.clone()) {
+ if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) {
continue;
}
@@ -493,9 +541,10 @@ fn lower_struct(
}
StructKind::Tuple
}
- ast::StructKind::Record(fl) => {
- for fd in fl.fields() {
- if !expander.is_cfg_enabled(db, &fd) {
+ (ast::StructKind::Record(fl), Fields::Record(fields)) => {
+ let cfg_options = &db.crate_graph()[krate].cfg_options;
+ for (fd, item_tree_id) in fl.fields().zip(fields.clone()) {
+ if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) {
continue;
}
@@ -510,7 +559,7 @@ fn lower_struct(
}
StructKind::Record
}
- ast::StructKind::Unit => StructKind::Unit,
+ _ => StructKind::Unit,
}
}
@@ -539,8 +588,8 @@ fn lower_fields(
InFile::new(
current_file_id,
match field.ast_id {
- FieldAstId::Record(it) => it.upcast(),
- FieldAstId::Tuple(it) => it.upcast(),
+ FieldAstId::Record(it) => it.erase(),
+ FieldAstId::Tuple(it) => it.erase(),
},
),
attrs.cfg().unwrap(),
@@ -563,8 +612,8 @@ fn lower_fields(
InFile::new(
current_file_id,
match field.ast_id {
- FieldAstId::Record(it) => it.upcast(),
- FieldAstId::Tuple(it) => it.upcast(),
+ FieldAstId::Record(it) => it.erase(),
+ FieldAstId::Tuple(it) => it.erase(),
},
),
attrs.cfg().unwrap(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
index 04ec47f84..e34a6768f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -12,27 +12,31 @@ use crate::{
body::{scope::ExprScopes, Body, BodySourceMap},
data::{
adt::{EnumData, StructData},
- ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData,
- TraitAliasData, TraitData, TypeAliasData,
+ ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData,
+ ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData,
},
generics::GenericParams,
import_map::ImportMap,
item_tree::{AttrOwner, ItemTree},
- lang_item::{LangItem, LangItemTarget, LangItems},
+ lang_item::{self, LangItem, LangItemTarget, LangItems},
nameres::{diagnostics::DefDiagnostic, DefMap},
visibility::{self, Visibility},
AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId,
- EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId,
- ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc,
- MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId,
- StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId,
- UnionLoc, VariantId,
+ EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId,
+ FunctionLoc, GenericDefId, ImplId, ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId,
+ LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
+ StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc,
+ TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, UseId, UseLoc, VariantId,
};
#[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase {
// region: items
#[salsa::interned]
+ fn intern_use(&self, loc: UseLoc) -> UseId;
+ #[salsa::interned]
+ fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId;
+ #[salsa::interned]
fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
#[salsa::interned]
fn intern_struct(&self, loc: StructLoc) -> StructId;
@@ -160,6 +164,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
#[salsa::invoke(ProcMacroData::proc_macro_data_query)]
fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
+ #[salsa::invoke(ExternCrateDeclData::extern_crate_decl_data_query)]
+ fn extern_crate_decl_data(&self, extern_crate: ExternCrateId) -> Arc<ExternCrateDeclData>;
+
// endregion:data
#[salsa::invoke(Body::body_with_source_map_query)]
@@ -197,6 +204,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
#[salsa::invoke(AttrsWithOwner::attrs_query)]
fn attrs(&self, def: AttrDefId) -> Attrs;
+ #[salsa::invoke(lang_item::lang_attr_query)]
+ fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
+
#[salsa::transparent]
#[salsa::invoke(AttrsWithOwner::attrs_with_owner)]
fn attrs_with_owner(&self, def: AttrDefId) -> AttrsWithOwner;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs
index f30be6b64..d0f2bfab4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs
@@ -8,9 +8,9 @@ use syntax::{ast, AstNode, AstPtr};
use crate::{
dyn_map::{DynMap, Policy},
- ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id,
- MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
- TypeOrConstParamId, UnionId,
+ ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId, LifetimeParamId,
+ Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
+ TypeOrConstParamId, UnionId, UseId,
};
pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
@@ -25,6 +25,8 @@ pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
pub const UNION: Key<ast::Union, UnionId> = Key::new();
pub const ENUM: Key<ast::Enum, EnumId> = Key::new();
+pub const EXTERN_CRATE: Key<ast::ExternCrate, ExternCrateId> = Key::new();
+pub const USE: Key<ast::Use, UseId> = Key::new();
pub const VARIANT: Key<ast::Variant, EnumVariantId> = Key::new();
pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
index a588827c8..6db8398bc 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs
@@ -15,18 +15,11 @@ use crate::{
MacroId, ModuleId,
};
-/// A subset of Expander that only deals with cfg attributes. We only need it to
-/// avoid cyclic queries in crate def map during enum processing.
#[derive(Debug)]
-pub(crate) struct CfgExpander {
+pub struct Expander {
cfg_options: CfgOptions,
hygiene: Hygiene,
krate: CrateId,
-}
-
-#[derive(Debug)]
-pub struct Expander {
- cfg_expander: CfgExpander,
pub(crate) current_file_id: HirFileId,
pub(crate) module: ModuleId,
/// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached.
@@ -34,41 +27,23 @@ pub struct Expander {
recursion_limit: Limit,
}
-impl CfgExpander {
- pub(crate) fn new(
- db: &dyn DefDatabase,
- current_file_id: HirFileId,
- krate: CrateId,
- ) -> CfgExpander {
- let hygiene = Hygiene::new(db.upcast(), current_file_id);
- let cfg_options = db.crate_graph()[krate].cfg_options.clone();
- CfgExpander { cfg_options, hygiene, krate }
- }
-
- pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
- Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
- }
-
- pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool {
- let attrs = self.parse_attrs(db, owner);
- attrs.is_cfg_enabled(&self.cfg_options)
- }
-
- pub(crate) fn hygiene(&self) -> &Hygiene {
- &self.hygiene
- }
-}
-
impl Expander {
pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander {
- let cfg_expander = CfgExpander::new(db, current_file_id, module.krate);
let recursion_limit = db.recursion_limit(module.krate);
#[cfg(not(test))]
let recursion_limit = Limit::new(recursion_limit as usize);
// Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
#[cfg(test)]
let recursion_limit = Limit::new(std::cmp::min(32, recursion_limit as usize));
- Expander { cfg_expander, current_file_id, module, recursion_depth: 0, recursion_limit }
+ Expander {
+ current_file_id,
+ module,
+ recursion_depth: 0,
+ recursion_limit,
+ cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
+ hygiene: Hygiene::new(db.upcast(), current_file_id),
+ krate: module.krate,
+ }
}
pub fn enter_expand<T: ast::AstNode>(
@@ -120,7 +95,7 @@ impl Expander {
}
pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
- self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+ self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
self.current_file_id = mark.file_id;
if self.recursion_depth == u32::MAX {
// Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@@ -135,7 +110,7 @@ impl Expander {
}
pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
- LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id)
+ LowerCtx::new(db, &self.hygiene, self.current_file_id)
}
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@@ -143,11 +118,11 @@ impl Expander {
}
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
- self.cfg_expander.parse_attrs(db, owner)
+ Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
}
pub(crate) fn cfg_options(&self) -> &CfgOptions {
- &self.cfg_expander.cfg_options
+ &self.cfg_options
}
pub fn current_file_id(&self) -> HirFileId {
@@ -155,7 +130,7 @@ impl Expander {
}
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
- let ctx = LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id);
+ let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
Path::from_src(path, &ctx)
}
@@ -189,18 +164,26 @@ impl Expander {
return ExpandResult { value: None, err };
};
- Self::enter_expand_inner(db, call_id, err).map(|value| {
- value.and_then(|InFile { file_id, value }| {
- let parse = value.cast::<T>()?;
-
- self.recursion_depth += 1;
- self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
- let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
- let mark =
- Mark { file_id: old_file_id, bomb: DropBomb::new("expansion mark dropped") };
- Some((mark, parse))
- })
- })
+ let res = Self::enter_expand_inner(db, call_id, err);
+ match res.err {
+ // If proc-macro is disabled or unresolved, we want to expand to a missing expression
+ // instead of an empty tree which might end up in an empty block.
+ Some(ExpandError::UnresolvedProcMacro(_)) => res.map(|_| None),
+ _ => res.map(|value| {
+ value.and_then(|InFile { file_id, value }| {
+ let parse = value.cast::<T>()?;
+
+ self.recursion_depth += 1;
+ self.hygiene = Hygiene::new(db.upcast(), file_id);
+ let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
+ let mark = Mark {
+ file_id: old_file_id,
+ bomb: DropBomb::new("expansion mark dropped"),
+ };
+ Some((mark, parse))
+ })
+ }),
+ }
}
}
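The special case introduced above, where an unresolved proc macro yields no expansion value but keeps its error, follows a pattern that can be sketched on a simplified, hypothetical `ExpandResult`; the real `hir_expand` types carry considerably more information.

```rust
// Simplified, hypothetical model of the error handling above: when the error
// says the proc macro could not be resolved, report it but yield no value, so
// the caller can substitute a "missing" node instead of an empty expansion.
enum ExpandError {
    UnresolvedProcMacro,
    #[allow(dead_code)]
    Other(String),
}

struct ExpandResult<T> {
    value: T,
    err: Option<ExpandError>,
}

impl<T> ExpandResult<T> {
    fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
        ExpandResult { value: f(self.value), err: self.err }
    }
}

fn finish(res: ExpandResult<Vec<String>>) -> ExpandResult<Option<Vec<String>>> {
    match res.err {
        // Unresolved proc macro: keep the diagnostic, drop the (empty) tree.
        Some(ExpandError::UnresolvedProcMacro) => res.map(|_| None),
        _ => res.map(Some),
    }
}

fn main() {
    let failed = finish(ExpandResult {
        value: Vec::new(),
        err: Some(ExpandError::UnresolvedProcMacro),
    });
    assert!(failed.value.is_none());
    assert!(failed.err.is_some());

    let ok = finish(ExpandResult { value: vec!["token".to_owned()], err: None });
    assert!(ok.value.is_some());
    assert!(ok.err.is_none());
}
```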
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
index 8c49ae1c4..df2af4c89 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
@@ -360,7 +360,7 @@ fn calculate_best_path(
prefer_no_std,
)?;
cov_mark::hit!(partially_imported);
- path.push_segment(info.path.segments.last()?.clone());
+ path.push_segment(info.name.clone());
Some(path)
})
});
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
index f19c3f028..d7d44e413 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -67,21 +67,21 @@ pub enum TypeOrConstParamData {
impl TypeOrConstParamData {
pub fn name(&self) -> Option<&Name> {
match self {
- TypeOrConstParamData::TypeParamData(x) => x.name.as_ref(),
- TypeOrConstParamData::ConstParamData(x) => Some(&x.name),
+ TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(),
+ TypeOrConstParamData::ConstParamData(it) => Some(&it.name),
}
}
pub fn has_default(&self) -> bool {
match self {
- TypeOrConstParamData::TypeParamData(x) => x.default.is_some(),
- TypeOrConstParamData::ConstParamData(x) => x.has_default,
+ TypeOrConstParamData::TypeParamData(it) => it.default.is_some(),
+ TypeOrConstParamData::ConstParamData(it) => it.has_default,
}
}
pub fn type_param(&self) -> Option<&TypeParamData> {
match self {
- TypeOrConstParamData::TypeParamData(x) => Some(x),
+ TypeOrConstParamData::TypeParamData(it) => Some(it),
TypeOrConstParamData::ConstParamData(_) => None,
}
}
@@ -89,14 +89,14 @@ impl TypeOrConstParamData {
pub fn const_param(&self) -> Option<&ConstParamData> {
match self {
TypeOrConstParamData::TypeParamData(_) => None,
- TypeOrConstParamData::ConstParamData(x) => Some(x),
+ TypeOrConstParamData::ConstParamData(it) => Some(it),
}
}
pub fn is_trait_self(&self) -> bool {
match self {
- TypeOrConstParamData::TypeParamData(x) => {
- x.provenance == TypeParamProvenance::TraitSelf
+ TypeOrConstParamData::TypeParamData(it) => {
+ it.provenance == TypeParamProvenance::TraitSelf
}
TypeOrConstParamData::ConstParamData(_) => false,
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
index 500e88006..6591c92ac 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs
@@ -85,7 +85,7 @@ impl fmt::Display for FloatTypeWrapper {
pub enum Literal {
String(Box<str>),
ByteString(Box<[u8]>),
- CString(Box<str>),
+ CString(Box<[u8]>),
Char(char),
Bool(bool),
Int(i128, Option<BuiltinInt>),
@@ -191,11 +191,6 @@ pub enum Expr {
body: ExprId,
label: Option<LabelId>,
},
- While {
- condition: ExprId,
- body: ExprId,
- label: Option<LabelId>,
- },
Call {
callee: ExprId,
args: Box<[ExprId]>,
@@ -379,10 +374,6 @@ impl Expr {
}
}
Expr::Loop { body, .. } => f(*body),
- Expr::While { condition, body, .. } => {
- f(*condition);
- f(*body);
- }
Expr::Call { callee, args, .. } => {
f(*callee);
args.iter().copied().for_each(f);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
index fa1f4933a..57f023ef3 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs
@@ -425,8 +425,8 @@ impl ConstRef {
}
match expr {
ast::Expr::PathExpr(p) if is_path_ident(&p) => {
- match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) {
- Some(x) => Self::Path(x.as_name()),
+ match p.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) {
+ Some(it) => Self::Path(it.as_name()),
None => Self::Scalar(LiteralConstRef::Unknown),
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index 48532655e..4b2e5041a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -1,13 +1,14 @@
//! A map of all publicly exported items in a crate.
+use std::collections::hash_map::Entry;
use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId;
use fst::{self, Streamer};
use hir_expand::name::Name;
-use indexmap::{map::Entry, IndexMap};
+use indexmap::IndexMap;
use itertools::Itertools;
-use rustc_hash::{FxHashSet, FxHasher};
+use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
use triomphe::Arc;
use crate::{
@@ -17,52 +18,23 @@ use crate::{
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
+// FIXME: Support aliases: an item may be exported under multiple names, so `ImportInfo` should
+// have `Vec<(Name, ModuleId)>` instead of `(Name, ModuleId)`.
/// Item import details stored in the `ImportMap`.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ImportInfo {
- /// A path that can be used to import the item, relative to the crate's root.
- pub path: ImportPath,
+ /// A name that can be used to import the item, relative to the crate's root.
+ pub name: Name,
/// The module containing this item.
pub container: ModuleId,
/// Whether the import is a trait associated item or not.
pub is_trait_assoc_item: bool,
}
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct ImportPath {
- pub segments: Vec<Name>,
-}
-
-impl ImportPath {
- pub fn display<'a>(&'a self, db: &'a dyn DefDatabase) -> impl fmt::Display + 'a {
- struct Display<'a> {
- db: &'a dyn DefDatabase,
- path: &'a ImportPath,
- }
- impl fmt::Display for Display<'_> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(
- &self.path.segments.iter().map(|it| it.display(self.db.upcast())).format("::"),
- f,
- )
- }
- }
- Display { db, path: self }
- }
-
- fn len(&self) -> usize {
- self.segments.len()
- }
-}
-
-/// A map from publicly exported items to the path needed to import/name them from a downstream
-/// crate.
+/// A map from publicly exported items to their names.
///
/// Reexports of items are taken into account, ie. if something is exported under multiple
/// names, the one with the shortest import path will be used.
-///
-/// Note that all paths are relative to the containing crate's root, so the crate name still needs
-/// to be prepended to the `ModPath` before the path is valid.
#[derive(Default)]
pub struct ImportMap {
map: FxIndexMap<ItemInNs, ImportInfo>,
@@ -70,122 +42,58 @@ pub struct ImportMap {
/// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the
/// values returned by running `fst`.
///
- /// Since a path can refer to multiple items due to namespacing, we store all items with the
- /// same path right after each other. This allows us to find all items after the FST gives us
+ /// Since a name can refer to multiple items due to namespacing, we store all items with the
+ /// same name right after each other. This allows us to find all items after the FST gives us
/// the index of the first one.
importables: Vec<ItemInNs>,
fst: fst::Map<Vec<u8>>,
}
impl ImportMap {
- pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
+ pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("import_map_query");
- let mut import_map = collect_import_map(db, krate);
+ let map = collect_import_map(db, krate);
- let mut importables = import_map
- .map
+ let mut importables: Vec<_> = map
.iter()
- .map(|(item, info)| (item, fst_path(db, &info.path)))
- .collect::<Vec<_>>();
- importables.sort_by(|(_, fst_path), (_, fst_path2)| fst_path.cmp(fst_path2));
+        // We've only collected items whose name cannot be a tuple field.
+ .map(|(&item, info)| (item, info.name.as_str().unwrap().to_ascii_lowercase()))
+ .collect();
+ importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name));
// Build the FST, taking care not to insert duplicate values.
-
let mut builder = fst::MapBuilder::memory();
- let mut last_batch_start = 0;
-
- for idx in 0..importables.len() {
- let key = &importables[last_batch_start].1;
- if let Some((_, fst_path)) = importables.get(idx + 1) {
- if key == fst_path {
- continue;
- }
- }
-
- let _ = builder.insert(key, last_batch_start as u64);
-
- last_batch_start = idx + 1;
+ let iter = importables.iter().enumerate().dedup_by(|lhs, rhs| lhs.1 .1 == rhs.1 .1);
+ for (start_idx, (_, name)) in iter {
+ let _ = builder.insert(name, start_idx as u64);
}
- import_map.fst = builder.into_map();
- import_map.importables = importables.iter().map(|&(&item, _)| item).collect();
-
- Arc::new(import_map)
- }
-
- /// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root.
- pub fn path_of(&self, item: ItemInNs) -> Option<&ImportPath> {
- self.import_info_for(item).map(|it| &it.path)
+ Arc::new(ImportMap {
+ map,
+ fst: builder.into_map(),
+ importables: importables.into_iter().map(|(item, _)| item).collect(),
+ })
}
pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> {
self.map.get(&item)
}
-
- #[cfg(test)]
- fn fmt_for_test(&self, db: &dyn DefDatabase) -> String {
- let mut importable_paths: Vec<_> = self
- .map
- .iter()
- .map(|(item, info)| {
- let ns = match item {
- ItemInNs::Types(_) => "t",
- ItemInNs::Values(_) => "v",
- ItemInNs::Macros(_) => "m",
- };
- format!("- {} ({ns})", info.path.display(db))
- })
- .collect();
-
- importable_paths.sort();
- importable_paths.join("\n")
- }
-
- fn collect_trait_assoc_items(
- &mut self,
- db: &dyn DefDatabase,
- tr: TraitId,
- is_type_in_ns: bool,
- original_import_info: &ImportInfo,
- ) {
- let _p = profile::span("collect_trait_assoc_items");
- for (assoc_item_name, item) in &db.trait_data(tr).items {
- let module_def_id = match item {
- AssocItemId::FunctionId(f) => ModuleDefId::from(*f),
- AssocItemId::ConstId(c) => ModuleDefId::from(*c),
- // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
- // qualifier, ergo no need to store it for imports in import_map
- AssocItemId::TypeAliasId(_) => {
- cov_mark::hit!(type_aliases_ignored);
- continue;
- }
- };
- let assoc_item = if is_type_in_ns {
- ItemInNs::Types(module_def_id)
- } else {
- ItemInNs::Values(module_def_id)
- };
-
- let mut assoc_item_info = original_import_info.clone();
- assoc_item_info.path.segments.push(assoc_item_name.to_owned());
- assoc_item_info.is_trait_assoc_item = true;
- self.map.insert(assoc_item, assoc_item_info);
- }
- }
}
-fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap {
+fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemInNs, ImportInfo> {
let _p = profile::span("collect_import_map");
let def_map = db.crate_def_map(krate);
- let mut import_map = ImportMap::default();
+ let mut map = FxIndexMap::default();
// We look only into modules that are public(ly reexported), starting with the crate root.
- let empty = ImportPath { segments: vec![] };
let root = def_map.module_id(DefMap::ROOT);
- let mut worklist = vec![(root, empty)];
- while let Some((module, mod_path)) = worklist.pop() {
+ let mut worklist = vec![(root, 0)];
+ // Records items' minimum module depth.
+ let mut depth_map = FxHashMap::default();
+
+ while let Some((module, depth)) = worklist.pop() {
let ext_def_map;
let mod_data = if module.krate == krate {
&def_map[module.local_id]
@@ -201,52 +109,83 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap {
});
for (name, per_ns) in visible_items {
- let mk_path = || {
- let mut path = mod_path.clone();
- path.segments.push(name.clone());
- path
- };
-
for item in per_ns.iter_items() {
- let path = mk_path();
- let path_len = path.len();
- let import_info =
- ImportInfo { path, container: module, is_trait_assoc_item: false };
-
- if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
- import_map.collect_trait_assoc_items(
- db,
- tr,
- matches!(item, ItemInNs::Types(_)),
- &import_info,
- );
- }
+ let import_info = ImportInfo {
+ name: name.clone(),
+ container: module,
+ is_trait_assoc_item: false,
+ };
- match import_map.map.entry(item) {
+ match depth_map.entry(item) {
Entry::Vacant(entry) => {
- entry.insert(import_info);
+ entry.insert(depth);
}
Entry::Occupied(mut entry) => {
- // If the new path is shorter, prefer that one.
- if path_len < entry.get().path.len() {
- *entry.get_mut() = import_info;
+ if depth < *entry.get() {
+ entry.insert(depth);
} else {
continue;
}
}
}
- // If we've just added a path to a module, descend into it. We might traverse
- // modules multiple times, but only if the new path to it is shorter than the
- // first (else we `continue` above).
+ if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
+ collect_trait_assoc_items(
+ db,
+ &mut map,
+ tr,
+ matches!(item, ItemInNs::Types(_)),
+ &import_info,
+ );
+ }
+
+ map.insert(item, import_info);
+
+ // If we've just added a module, descend into it. We might traverse modules
+ // multiple times, but only if the module depth is smaller (else we `continue`
+ // above).
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
- worklist.push((mod_id, mk_path()));
+ worklist.push((mod_id, depth + 1));
}
}
}
}
- import_map
+ map
+}
+
+fn collect_trait_assoc_items(
+ db: &dyn DefDatabase,
+ map: &mut FxIndexMap<ItemInNs, ImportInfo>,
+ tr: TraitId,
+ is_type_in_ns: bool,
+ trait_import_info: &ImportInfo,
+) {
+ let _p = profile::span("collect_trait_assoc_items");
+ for (assoc_item_name, item) in &db.trait_data(tr).items {
+ let module_def_id = match item {
+ AssocItemId::FunctionId(f) => ModuleDefId::from(*f),
+ AssocItemId::ConstId(c) => ModuleDefId::from(*c),
+ // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
+ // qualifier, ergo no need to store it for imports in import_map
+ AssocItemId::TypeAliasId(_) => {
+ cov_mark::hit!(type_aliases_ignored);
+ continue;
+ }
+ };
+ let assoc_item = if is_type_in_ns {
+ ItemInNs::Types(module_def_id)
+ } else {
+ ItemInNs::Values(module_def_id)
+ };
+
+ let assoc_item_info = ImportInfo {
+ container: trait_import_info.container,
+ name: assoc_item_name.clone(),
+ is_trait_assoc_item: true,
+ };
+ map.insert(assoc_item, assoc_item_info);
+ }
}
impl PartialEq for ImportMap {
@@ -260,7 +199,7 @@ impl Eq for ImportMap {}
impl fmt::Debug for ImportMap {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let mut importable_paths: Vec<_> = self
+ let mut importable_names: Vec<_> = self
.map
.iter()
.map(|(item, _)| match item {
@@ -270,56 +209,40 @@ impl fmt::Debug for ImportMap {
})
.collect();
- importable_paths.sort();
- f.write_str(&importable_paths.join("\n"))
+ importable_names.sort();
+ f.write_str(&importable_names.join("\n"))
}
}
-fn fst_path(db: &dyn DefDatabase, path: &ImportPath) -> String {
- let _p = profile::span("fst_path");
- let mut s = path.display(db).to_string();
- s.make_ascii_lowercase();
- s
-}
-
-#[derive(Debug, Eq, PartialEq, Hash)]
-pub enum ImportKind {
- Module,
- Function,
- Adt,
- EnumVariant,
- Const,
- Static,
- Trait,
- TraitAlias,
- TypeAlias,
- BuiltinType,
- AssociatedItem,
- Macro,
-}
-
/// A way to match import map contents against the search query.
#[derive(Debug)]
-pub enum SearchMode {
+enum SearchMode {
/// Import map entry should strictly match the query string.
- Equals,
- /// Import map entry should contain the query string.
- Contains,
+ Exact,
/// Import map entry should contain all letters from the query string,
    /// in the same order, but not necessarily adjacent.
Fuzzy,
}
+/// Three possible ways to search for the name in associated and/or other items.
+#[derive(Debug, Clone, Copy)]
+pub enum AssocSearchMode {
+ /// Search for the name in both associated and other items.
+ Include,
+ /// Search for the name in other items only.
+ Exclude,
+ /// Search for the name in the associated items only.
+ AssocItemsOnly,
+}
+
#[derive(Debug)]
pub struct Query {
query: String,
lowercased: String,
- name_only: bool,
- assoc_items_only: bool,
search_mode: SearchMode,
+ assoc_mode: AssocSearchMode,
case_sensitive: bool,
limit: usize,
- exclude_import_kinds: FxHashSet<ImportKind>,
}
impl Query {
@@ -328,30 +251,21 @@ impl Query {
Self {
query,
lowercased,
- name_only: false,
- assoc_items_only: false,
- search_mode: SearchMode::Contains,
+ search_mode: SearchMode::Exact,
+ assoc_mode: AssocSearchMode::Include,
case_sensitive: false,
- limit: usize::max_value(),
- exclude_import_kinds: FxHashSet::default(),
+ limit: usize::MAX,
}
}
- /// Matches entries' names only, ignoring the rest of
- /// the qualifier.
- /// Example: for `std::marker::PhantomData`, the name is `PhantomData`.
- pub fn name_only(self) -> Self {
- Self { name_only: true, ..self }
- }
-
- /// Matches only the entries that are associated items, ignoring the rest.
- pub fn assoc_items_only(self) -> Self {
- Self { assoc_items_only: true, ..self }
+ /// Fuzzy finds items instead of exact matching.
+ pub fn fuzzy(self) -> Self {
+ Self { search_mode: SearchMode::Fuzzy, ..self }
}
- /// Specifies the way to search for the entries using the query.
- pub fn search_mode(self, search_mode: SearchMode) -> Self {
- Self { search_mode, ..self }
+ /// Specifies whether we want to include associated items in the result.
+ pub fn assoc_search_mode(self, assoc_mode: AssocSearchMode) -> Self {
+ Self { assoc_mode, ..self }
}
/// Limits the returned number of items to `limit`.
@@ -364,12 +278,6 @@ impl Query {
Self { case_sensitive: true, ..self }
}
- /// Do not include imports of the specified kind in the search results.
- pub fn exclude_import_kind(mut self, import_kind: ImportKind) -> Self {
- self.exclude_import_kinds.insert(import_kind);
- self
- }
-
fn import_matches(
&self,
db: &dyn DefDatabase,
@@ -377,49 +285,36 @@ impl Query {
enforce_lowercase: bool,
) -> bool {
let _p = profile::span("import_map::Query::import_matches");
- if import.is_trait_assoc_item {
- if self.exclude_import_kinds.contains(&ImportKind::AssociatedItem) {
- return false;
- }
- } else if self.assoc_items_only {
- return false;
+ match (import.is_trait_assoc_item, self.assoc_mode) {
+ (true, AssocSearchMode::Exclude) => return false,
+ (false, AssocSearchMode::AssocItemsOnly) => return false,
+ _ => {}
}
- let mut input = if import.is_trait_assoc_item || self.name_only {
- import.path.segments.last().unwrap().display(db.upcast()).to_string()
- } else {
- import.path.display(db).to_string()
- };
- if enforce_lowercase || !self.case_sensitive {
+ let mut input = import.name.display(db.upcast()).to_string();
+ let case_insensitive = enforce_lowercase || !self.case_sensitive;
+ if case_insensitive {
input.make_ascii_lowercase();
}
- let query_string =
- if !enforce_lowercase && self.case_sensitive { &self.query } else { &self.lowercased };
+ let query_string = if case_insensitive { &self.lowercased } else { &self.query };
match self.search_mode {
- SearchMode::Equals => &input == query_string,
- SearchMode::Contains => input.contains(query_string),
+ SearchMode::Exact => &input == query_string,
SearchMode::Fuzzy => {
- let mut unchecked_query_chars = query_string.chars();
- let mut mismatching_query_char = unchecked_query_chars.next();
-
- for input_char in input.chars() {
- match mismatching_query_char {
- None => return true,
- Some(matching_query_char) if matching_query_char == input_char => {
- mismatching_query_char = unchecked_query_chars.next();
- }
- _ => (),
+ let mut input_chars = input.chars();
+ for query_char in query_string.chars() {
+ if input_chars.find(|&it| it == query_char).is_none() {
+ return false;
}
}
- mismatching_query_char.is_none()
+ true
}
}
}
}
-/// Searches dependencies of `krate` for an importable path matching `query`.
+/// Searches dependencies of `krate` for an importable name matching `query`.
///
/// This returns a list of items that could be imported from dependencies of `krate`.
pub fn search_dependencies(
@@ -442,65 +337,44 @@ pub fn search_dependencies(
let mut stream = op.union();
- let mut all_indexed_values = FxHashSet::default();
- while let Some((_, indexed_values)) = stream.next() {
- all_indexed_values.extend(indexed_values.iter().copied());
- }
-
let mut res = FxHashSet::default();
- for indexed_value in all_indexed_values {
- let import_map = &import_maps[indexed_value.index];
- let importables = &import_map.importables[indexed_value.value as usize..];
+ while let Some((_, indexed_values)) = stream.next() {
+ for indexed_value in indexed_values {
+ let import_map = &import_maps[indexed_value.index];
+ let importables = &import_map.importables[indexed_value.value as usize..];
- let common_importable_data = &import_map.map[&importables[0]];
- if !query.import_matches(db, common_importable_data, true) {
- continue;
- }
+ let common_importable_data = &import_map.map[&importables[0]];
+ if !query.import_matches(db, common_importable_data, true) {
+ continue;
+ }
- // Path shared by the importable items in this group.
- let common_importables_path_fst = fst_path(db, &common_importable_data.path);
- // Add the items from this `ModPath` group. Those are all subsequent items in
- // `importables` whose paths match `path`.
- let iter = importables
- .iter()
- .copied()
- .take_while(|item| {
- common_importables_path_fst == fst_path(db, &import_map.map[item].path)
- })
- .filter(|&item| match item_import_kind(item) {
- Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind),
- None => true,
- })
- .filter(|item| {
- !query.case_sensitive // we've already checked the common importables path case-insensitively
+ // Name shared by the importable items in this group.
+ let common_importable_name =
+ common_importable_data.name.to_smol_str().to_ascii_lowercase();
+ // Add the items from this name group. Those are all subsequent items in
+            // `importables` whose names match `common_importable_name`.
+ let iter = importables
+ .iter()
+ .copied()
+ .take_while(|item| {
+ common_importable_name
+ == import_map.map[item].name.to_smol_str().to_ascii_lowercase()
+ })
+ .filter(|item| {
+ !query.case_sensitive // we've already checked the common importables name case-insensitively
|| query.import_matches(db, &import_map.map[item], false)
- });
- res.extend(iter);
+ });
+ res.extend(iter);
- if res.len() >= query.limit {
- return res;
+ if res.len() >= query.limit {
+ return res;
+ }
}
}
res
}
-fn item_import_kind(item: ItemInNs) -> Option<ImportKind> {
- Some(match item.as_module_def_id()? {
- ModuleDefId::ModuleId(_) => ImportKind::Module,
- ModuleDefId::FunctionId(_) => ImportKind::Function,
- ModuleDefId::AdtId(_) => ImportKind::Adt,
- ModuleDefId::EnumVariantId(_) => ImportKind::EnumVariant,
- ModuleDefId::ConstId(_) => ImportKind::Const,
- ModuleDefId::StaticId(_) => ImportKind::Static,
- ModuleDefId::TraitId(_) => ImportKind::Trait,
- ModuleDefId::TraitAliasId(_) => ImportKind::TraitAlias,
- ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias,
- ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType,
- ModuleDefId::MacroId(_) => ImportKind::Macro,
- })
-}
-
#[cfg(test)]
mod tests {
use base_db::{fixture::WithFixture, SourceDatabase, Upcast};
@@ -510,16 +384,39 @@ mod tests {
use super::*;
+ impl ImportMap {
+ fn fmt_for_test(&self, db: &dyn DefDatabase) -> String {
+ let mut importable_paths: Vec<_> = self
+ .map
+ .iter()
+ .map(|(item, info)| {
+ let path = render_path(db, info);
+ let ns = match item {
+ ItemInNs::Types(_) => "t",
+ ItemInNs::Values(_) => "v",
+ ItemInNs::Macros(_) => "m",
+ };
+ format!("- {path} ({ns})")
+ })
+ .collect();
+
+ importable_paths.sort();
+ importable_paths.join("\n")
+ }
+ }
+
fn check_search(ra_fixture: &str, crate_name: &str, query: Query, expect: Expect) {
let db = TestDB::with_files(ra_fixture);
let crate_graph = db.crate_graph();
let krate = crate_graph
.iter()
- .find(|krate| {
- crate_graph[*krate].display_name.as_ref().map(|n| n.to_string())
- == Some(crate_name.to_string())
+ .find(|&krate| {
+ crate_graph[krate]
+ .display_name
+ .as_ref()
+ .is_some_and(|it| &**it.crate_name() == crate_name)
})
- .unwrap();
+ .expect("could not find crate");
let actual = search_dependencies(db.upcast(), krate, query)
.into_iter()
@@ -530,7 +427,7 @@ mod tests {
let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
Some(assoc_item_path) => (assoc_item_path, "a"),
None => (
- dependency_imports.path_of(dependency)?.display(&db).to_string(),
+ render_path(&db, dependency_imports.import_info_for(dependency)?),
match dependency {
ItemInNs::Types(ModuleDefId::FunctionId(_))
| ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
@@ -560,57 +457,25 @@ mod tests {
dependency_imports: &ImportMap,
dependency: ItemInNs,
) -> Option<String> {
- let dependency_assoc_item_id = match dependency {
- ItemInNs::Types(ModuleDefId::FunctionId(id))
- | ItemInNs::Values(ModuleDefId::FunctionId(id)) => AssocItemId::from(id),
- ItemInNs::Types(ModuleDefId::ConstId(id))
- | ItemInNs::Values(ModuleDefId::ConstId(id)) => AssocItemId::from(id),
- ItemInNs::Types(ModuleDefId::TypeAliasId(id))
- | ItemInNs::Values(ModuleDefId::TypeAliasId(id)) => AssocItemId::from(id),
+ let (dependency_assoc_item_id, container) = match dependency.as_module_def_id()? {
+ ModuleDefId::FunctionId(id) => (AssocItemId::from(id), id.lookup(db).container),
+ ModuleDefId::ConstId(id) => (AssocItemId::from(id), id.lookup(db).container),
+ ModuleDefId::TypeAliasId(id) => (AssocItemId::from(id), id.lookup(db).container),
_ => return None,
};
- let trait_ = assoc_to_trait(db, dependency)?;
- if let ModuleDefId::TraitId(tr) = trait_.as_module_def_id()? {
- let trait_data = db.trait_data(tr);
- let assoc_item_name =
- trait_data.items.iter().find_map(|(assoc_item_name, assoc_item_id)| {
- if &dependency_assoc_item_id == assoc_item_id {
- Some(assoc_item_name)
- } else {
- None
- }
- })?;
- return Some(format!(
- "{}::{}",
- dependency_imports.path_of(trait_)?.display(db),
- assoc_item_name.display(db.upcast())
- ));
- }
- None
- }
-
- fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> Option<ItemInNs> {
- let assoc: AssocItemId = match item {
- ItemInNs::Types(it) | ItemInNs::Values(it) => match it {
- ModuleDefId::TypeAliasId(it) => it.into(),
- ModuleDefId::FunctionId(it) => it.into(),
- ModuleDefId::ConstId(it) => it.into(),
- _ => return None,
- },
- _ => return None,
+ let ItemContainerId::TraitId(trait_id) = container else {
+ return None;
};
- let container = match assoc {
- AssocItemId::FunctionId(it) => it.lookup(db).container,
- AssocItemId::ConstId(it) => it.lookup(db).container,
- AssocItemId::TypeAliasId(it) => it.lookup(db).container,
- };
+ let trait_info = dependency_imports.import_info_for(ItemInNs::Types(trait_id.into()))?;
- match container {
- ItemContainerId::TraitId(it) => Some(ItemInNs::Types(it.into())),
- _ => None,
- }
+ let trait_data = db.trait_data(trait_id);
+ let (assoc_item_name, _) = trait_data
+ .items
+ .iter()
+ .find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?;
+ Some(format!("{}::{}", render_path(db, trait_info), assoc_item_name.display(db.upcast())))
}
fn check(ra_fixture: &str, expect: Expect) {
@@ -633,6 +498,24 @@ mod tests {
expect.assert_eq(&actual)
}
+ fn render_path(db: &dyn DefDatabase, info: &ImportInfo) -> String {
+ let mut module = info.container;
+ let mut segments = vec![&info.name];
+
+ let def_map = module.def_map(db);
+ assert!(def_map.block_id().is_none(), "block local items should not be in `ImportMap`");
+
+ while let Some(parent) = module.containing_module(db) {
+ let parent_data = &def_map[parent.local_id];
+ let (name, _) =
+ parent_data.children.iter().find(|(_, id)| **id == module.local_id).unwrap();
+ segments.push(name);
+ module = parent;
+ }
+
+ segments.iter().rev().map(|it| it.display(db.upcast())).join("::")
+ }
+
#[test]
fn smoke() {
check(
@@ -749,6 +632,7 @@ mod tests {
#[test]
fn module_reexport() {
// Reexporting modules from a dependency adds all contents to the import map.
+ // XXX: The rendered paths are relative to the defining crate.
check(
r"
//- /main.rs crate:main deps:lib
@@ -764,9 +648,9 @@ mod tests {
- module::S (t)
- module::S (v)
main:
+ - module::S (t)
+ - module::S (v)
- reexported_module (t)
- - reexported_module::S (t)
- - reexported_module::S (v)
"#]],
);
}
@@ -868,10 +752,9 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+ Query::new("fmt".to_string()).fuzzy(),
expect![[r#"
dep::fmt (t)
- dep::fmt::Display (t)
dep::fmt::Display::FMT_CONST (a)
dep::fmt::Display::format_function (a)
dep::fmt::Display::format_method (a)
@@ -898,7 +781,9 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy).assoc_items_only(),
+ Query::new("fmt".to_string())
+ .fuzzy()
+ .assoc_search_mode(AssocSearchMode::AssocItemsOnly),
expect![[r#"
dep::fmt::Display::FMT_CONST (a)
dep::fmt::Display::format_function (a)
@@ -909,23 +794,10 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string())
- .search_mode(SearchMode::Fuzzy)
- .exclude_import_kind(ImportKind::AssociatedItem),
+ Query::new("fmt".to_string()).fuzzy().assoc_search_mode(AssocSearchMode::Exclude),
expect![[r#"
- dep::fmt (t)
- dep::fmt::Display (t)
- "#]],
- );
-
- check_search(
- ra_fixture,
- "main",
- Query::new("fmt".to_string())
- .search_mode(SearchMode::Fuzzy)
- .assoc_items_only()
- .exclude_import_kind(ImportKind::AssociatedItem),
- expect![[r#""#]],
+ dep::fmt (t)
+ "#]],
);
}
@@ -958,13 +830,12 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
+ Query::new("fmt".to_string()).fuzzy(),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
dep::Fmt (v)
dep::fmt (t)
- dep::fmt::Display (t)
dep::fmt::Display::fmt (a)
dep::format (f)
"#]],
@@ -973,26 +844,12 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Equals),
- expect![[r#"
- dep::Fmt (m)
- dep::Fmt (t)
- dep::Fmt (v)
- dep::fmt (t)
- dep::fmt::Display::fmt (a)
- "#]],
- );
-
- check_search(
- ra_fixture,
- "main",
- Query::new("fmt".to_string()).search_mode(SearchMode::Contains),
+ Query::new("fmt".to_string()),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
dep::Fmt (v)
dep::fmt (t)
- dep::fmt::Display (t)
dep::fmt::Display::fmt (a)
"#]],
);
@@ -1033,7 +890,6 @@ mod tests {
dep::Fmt (t)
dep::Fmt (v)
dep::fmt (t)
- dep::fmt::Display (t)
dep::fmt::Display::fmt (a)
"#]],
);
@@ -1041,7 +897,7 @@ mod tests {
check_search(
ra_fixture,
"main",
- Query::new("fmt".to_string()).name_only(),
+ Query::new("fmt".to_string()),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
@@ -1106,43 +962,10 @@ mod tests {
pub fn no() {}
"#,
"main",
- Query::new("".to_string()).limit(2),
- expect![[r#"
- dep::Fmt (m)
- dep::Fmt (t)
- dep::Fmt (v)
- dep::fmt (t)
- "#]],
- );
- }
-
- #[test]
- fn search_exclusions() {
- let ra_fixture = r#"
- //- /main.rs crate:main deps:dep
- //- /dep.rs crate:dep
-
- pub struct fmt;
- pub struct FMT;
- "#;
-
- check_search(
- ra_fixture,
- "main",
- Query::new("FMT".to_string()),
+ Query::new("".to_string()).fuzzy().limit(1),
expect![[r#"
- dep::FMT (t)
- dep::FMT (v)
- dep::fmt (t)
- dep::fmt (v)
+ dep::fmt::Display (t)
"#]],
);
-
- check_search(
- ra_fixture,
- "main",
- Query::new("FMT".to_string()).exclude_import_kind(ImportKind::Adt),
- expect![[r#""#]],
- );
}
}
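
Taken together, the import_map.rs changes replace path-based lookups with name-based ones. A minimal sketch of the reworked query API, assembled from the builder calls and `search_dependencies` call sites visible in the tests above (`db` and `krate` are assumed to be a `&dyn DefDatabase` and a `CrateId` obtained elsewhere):

    let query = Query::new("fmt".to_string())
        .fuzzy()                                     // subsequence match instead of exact
        .assoc_search_mode(AssocSearchMode::Exclude) // skip trait-associated items
        .limit(10);
    // Yields an FxHashSet<ItemInNs> drawn from dependencies of `krate`.
    let items = search_dependencies(db, krate, query);

The new `SearchMode::Fuzzy` arm accepts a name when every query character appears in it in order, not necessarily adjacent; a standalone equivalent of that loop:

    fn fuzzy_matches(input: &str, query: &str) -> bool {
        let mut input_chars = input.chars();
        // `any` advances `input_chars`, so matches must occur in order.
        query.chars().all(|q| input_chars.any(|c| c == q))
    }
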
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
index 2001fb29a..873accafb 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -14,8 +14,9 @@ use stdx::format_to;
use syntax::ast;
use crate::{
- db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId, HasModule,
- ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
+ db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId,
+ ExternCrateId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
+ UseId,
};
#[derive(Copy, Clone, Debug)]
@@ -50,6 +51,7 @@ pub struct ItemScope {
unnamed_consts: Vec<ConstId>,
/// Traits imported via `use Trait as _;`.
unnamed_trait_imports: FxHashMap<TraitId, Visibility>,
+ extern_crate_decls: Vec<ExternCrateId>,
/// Macros visible in current module in legacy textual scope
///
/// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first.
@@ -112,6 +114,17 @@ impl ItemScope {
self.declarations.iter().copied()
}
+ pub fn extern_crate_decls(
+ &self,
+ ) -> impl Iterator<Item = ExternCrateId> + ExactSizeIterator + '_ {
+ self.extern_crate_decls.iter().copied()
+ }
+
+ pub fn use_decls(&self) -> impl Iterator<Item = UseId> + ExactSizeIterator + '_ {
+ // FIXME: to be implemented
+ std::iter::empty()
+ }
+
pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
self.impls.iter().copied()
}
@@ -188,7 +201,11 @@ impl ItemScope {
}
pub(crate) fn define_impl(&mut self, imp: ImplId) {
- self.impls.push(imp)
+ self.impls.push(imp);
+ }
+
+ pub(crate) fn define_extern_crate_decl(&mut self, extern_crate: ExternCrateId) {
+ self.extern_crate_decls.push(extern_crate);
}
pub(crate) fn define_unnamed_const(&mut self, konst: ConstId) {
@@ -397,6 +414,7 @@ impl ItemScope {
legacy_macros,
attr_macros,
derive_macros,
+ extern_crate_decls,
} = self;
types.shrink_to_fit();
values.shrink_to_fit();
@@ -409,6 +427,7 @@ impl ItemScope {
legacy_macros.shrink_to_fit();
attr_macros.shrink_to_fit();
derive_macros.shrink_to_fit();
+ extern_crate_decls.shrink_to_fit();
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index e74b71888..c9b0f75f1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -46,7 +46,7 @@ use ast::{AstNode, HasName, StructKind};
use base_db::CrateId;
use either::Either;
use hir_expand::{
- ast_id_map::FileAstId,
+ ast_id_map::{AstIdNode, FileAstId},
attrs::RawAttrs,
hygiene::Hygiene,
name::{name, AsName, Name},
@@ -188,7 +188,7 @@ impl ItemTree {
fn shrink_to_fit(&mut self) {
if let Some(data) = &mut self.data {
let ItemTreeData {
- imports,
+ uses,
extern_crates,
extern_blocks,
functions,
@@ -211,7 +211,7 @@ impl ItemTree {
vis,
} = &mut **data;
- imports.shrink_to_fit();
+ uses.shrink_to_fit();
extern_crates.shrink_to_fit();
extern_blocks.shrink_to_fit();
functions.shrink_to_fit();
@@ -262,7 +262,7 @@ static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(ModPath::from_kind(P
#[derive(Default, Debug, Eq, PartialEq)]
struct ItemTreeData {
- imports: Arena<Import>,
+ uses: Arena<Use>,
extern_crates: Arena<ExternCrate>,
extern_blocks: Arena<ExternBlock>,
functions: Arena<Function>,
@@ -314,7 +314,7 @@ from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Id
/// Trait implemented by all item nodes in the item tree.
pub trait ItemTreeNode: Clone {
- type Source: AstNode + Into<ast::Item>;
+ type Source: AstIdNode + Into<ast::Item>;
fn ast_id(&self) -> FileAstId<Self::Source>;
@@ -486,7 +486,7 @@ macro_rules! mod_items {
}
mod_items! {
- Import in imports -> ast::Use,
+ Use in uses -> ast::Use,
ExternCrate in extern_crates -> ast::ExternCrate,
ExternBlock in extern_blocks -> ast::ExternBlock,
Function in functions -> ast::Fn,
@@ -541,7 +541,7 @@ impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
}
#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct Import {
+pub struct Use {
pub visibility: RawVisibilityId,
pub ast_id: FileAstId<ast::Use>,
pub use_tree: UseTree,
@@ -744,7 +744,7 @@ pub struct MacroDef {
pub ast_id: FileAstId<ast::MacroDef>,
}
-impl Import {
+impl Use {
/// Maps a `UseTree` contained in this import back to its AST node.
pub fn use_tree_to_ast(
&self,
@@ -870,7 +870,7 @@ macro_rules! impl_froms {
impl ModItem {
pub fn as_assoc_item(&self) -> Option<AssocItem> {
match self {
- ModItem::Import(_)
+ ModItem::Use(_)
| ModItem::ExternCrate(_)
| ModItem::ExternBlock(_)
| ModItem::Struct(_)
@@ -892,7 +892,7 @@ impl ModItem {
pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
match self {
- ModItem::Import(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Use(it) => tree[it.index].ast_id().upcast(),
ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
ModItem::Function(it) => tree[it.index].ast_id().upcast(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index 46633667e..7b898e62d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -502,13 +502,13 @@ impl<'a> Ctx<'a> {
Some(id(self.data().impls.alloc(res)))
}
- fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Import>> {
+ fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item);
let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
- let res = Import { visibility, ast_id, use_tree };
- Some(id(self.data().imports.alloc(res)))
+ let res = Use { visibility, ast_id, use_tree };
+ Some(id(self.data().uses.alloc(res)))
}
fn lower_extern_crate(
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index e873316a5..da30830fe 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -52,7 +52,7 @@ struct Printer<'a> {
needs_indent: bool,
}
-impl<'a> Printer<'a> {
+impl Printer<'_> {
fn indented(&mut self, f: impl FnOnce(&mut Self)) {
self.indent_level += 1;
wln!(self);
@@ -198,8 +198,8 @@ impl<'a> Printer<'a> {
self.print_attrs_of(item);
match item {
- ModItem::Import(it) => {
- let Import { visibility, use_tree, ast_id: _ } = &self.tree[it];
+ ModItem::Use(it) => {
+ let Use { visibility, use_tree, ast_id: _ } = &self.tree[it];
self.print_visibility(*visibility);
w!(self, "use ");
self.print_use_tree(use_tree);
@@ -572,7 +572,7 @@ impl<'a> Printer<'a> {
}
}
-impl<'a> Write for Printer<'a> {
+impl Write for Printer<'_> {
fn write_str(&mut self, s: &str) -> fmt::Result {
for line in s.split_inclusive('\n') {
if self.needs_indent {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
index 0e9ac58fb..627479bb7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -180,15 +180,15 @@ impl LangItems {
T: Into<AttrDefId> + Copy,
{
let _p = profile::span("collect_lang_item");
- if let Some(lang_item) = lang_attr(db, item) {
+ if let Some(lang_item) = db.lang_attr(item.into()) {
self.items.entry(lang_item).or_insert_with(|| constructor(item));
}
}
}
-pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<LangItem> {
- let attrs = db.attrs(item.into());
- attrs.by_key("lang").string_value().cloned().and_then(|it| LangItem::from_str(&it))
+pub(crate) fn lang_attr_query(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> {
+ let attrs = db.attrs(item);
+ attrs.by_key("lang").string_value().and_then(|it| LangItem::from_str(&it))
}
pub enum GenericRequirement {
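
With `lang_attr` turned into `lang_attr_query`, callers go through the database instead of a free function, so the attribute lookup is presumably memoized as a salsa query. Call-site sketch based on the updated `collect_lang_item` above (`item` is assumed to implement `Into<AttrDefId> + Copy`):

    // before: let lang_item = lang_attr(db, item);
    let lang_item: Option<LangItem> = db.lang_attr(item.into());
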
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index 9d8b57a0d..1901db8a0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -64,7 +64,7 @@ use std::{
use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
use hir_expand::{
- ast_id_map::FileAstId,
+ ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput},
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
@@ -88,8 +88,8 @@ use crate::{
builtin_type::BuiltinType,
data::adt::VariantData,
item_tree::{
- Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, Static,
- Struct, Trait, TraitAlias, TypeAlias, Union,
+ Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules,
+ Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use,
},
};
@@ -121,6 +121,12 @@ impl From<CrateRootModuleId> for ModuleDefId {
}
}
+impl From<CrateId> for CrateRootModuleId {
+ fn from(krate: CrateId) -> Self {
+ CrateRootModuleId { krate }
+ }
+}
+
impl TryFrom<ModuleId> for CrateRootModuleId {
type Error = ();
@@ -145,24 +151,28 @@ pub struct ModuleId {
}
impl ModuleId {
- pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
+ pub fn def_map(self, db: &dyn db::DefDatabase) -> Arc<DefMap> {
match self.block {
Some(block) => db.block_def_map(block),
None => db.crate_def_map(self.krate),
}
}
- pub fn krate(&self) -> CrateId {
+ pub fn krate(self) -> CrateId {
self.krate
}
- pub fn containing_module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
+ pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
self.def_map(db).containing_module(self.local_id)
}
- pub fn containing_block(&self) -> Option<BlockId> {
+ pub fn containing_block(self) -> Option<BlockId> {
self.block
}
+
+ pub fn is_block_module(self) -> bool {
+ self.block.is_some() && self.local_id == DefMap::ROOT
+ }
}
/// An ID of a module, **local** to a `DefMap`.
@@ -314,6 +324,16 @@ type ImplLoc = ItemLoc<Impl>;
impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct UseId(salsa::InternId);
+type UseLoc = ItemLoc<Use>;
+impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct ExternCrateId(salsa::InternId);
+type ExternCrateLoc = ItemLoc<ExternCrate>;
+impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
pub struct ExternBlockId(salsa::InternId);
type ExternBlockLoc = ItemLoc<ExternBlock>;
impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block);
@@ -392,14 +412,14 @@ impl TypeParamId {
impl TypeParamId {
/// Caller should check if this toc id really belongs to a type
- pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
- Self(x)
+ pub fn from_unchecked(it: TypeOrConstParamId) -> Self {
+ Self(it)
}
}
impl From<TypeParamId> for TypeOrConstParamId {
- fn from(x: TypeParamId) -> Self {
- x.0
+ fn from(it: TypeParamId) -> Self {
+ it.0
}
}
@@ -418,14 +438,14 @@ impl ConstParamId {
impl ConstParamId {
/// Caller should check if this toc id really belongs to a const
- pub fn from_unchecked(x: TypeOrConstParamId) -> Self {
- Self(x)
+ pub fn from_unchecked(it: TypeOrConstParamId) -> Self {
+ Self(it)
}
}
impl From<ConstParamId> for TypeOrConstParamId {
- fn from(x: ConstParamId) -> Self {
- x.0
+ fn from(it: ConstParamId) -> Self {
+ it.0
}
}
@@ -548,14 +568,14 @@ pub enum TypeOwnerId {
impl TypeOwnerId {
fn as_generic_def_id(self) -> Option<GenericDefId> {
Some(match self {
- TypeOwnerId::FunctionId(x) => GenericDefId::FunctionId(x),
- TypeOwnerId::ConstId(x) => GenericDefId::ConstId(x),
- TypeOwnerId::AdtId(x) => GenericDefId::AdtId(x),
- TypeOwnerId::TraitId(x) => GenericDefId::TraitId(x),
- TypeOwnerId::TraitAliasId(x) => GenericDefId::TraitAliasId(x),
- TypeOwnerId::TypeAliasId(x) => GenericDefId::TypeAliasId(x),
- TypeOwnerId::ImplId(x) => GenericDefId::ImplId(x),
- TypeOwnerId::EnumVariantId(x) => GenericDefId::EnumVariantId(x),
+ TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it),
+ TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it),
+ TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it),
+ TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it),
+ TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it),
+ TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
+ TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it),
+ TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it),
TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => {
return None
}
@@ -578,15 +598,15 @@ impl_from!(
for TypeOwnerId
);
-// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let x: Type = _; }`)
+// Every `DefWithBodyId` is a type owner, since bodies can contain types (e.g. `{ let it: Type = _; }`)
impl From<DefWithBodyId> for TypeOwnerId {
fn from(value: DefWithBodyId) -> Self {
match value {
- DefWithBodyId::FunctionId(x) => x.into(),
- DefWithBodyId::StaticId(x) => x.into(),
- DefWithBodyId::ConstId(x) => x.into(),
- DefWithBodyId::InTypeConstId(x) => x.into(),
- DefWithBodyId::VariantId(x) => x.into(),
+ DefWithBodyId::FunctionId(it) => it.into(),
+ DefWithBodyId::StaticId(it) => it.into(),
+ DefWithBodyId::ConstId(it) => it.into(),
+ DefWithBodyId::InTypeConstId(it) => it.into(),
+ DefWithBodyId::VariantId(it) => it.into(),
}
}
}
@@ -594,14 +614,14 @@ impl From<DefWithBodyId> for TypeOwnerId {
impl From<GenericDefId> for TypeOwnerId {
fn from(value: GenericDefId) -> Self {
match value {
- GenericDefId::FunctionId(x) => x.into(),
- GenericDefId::AdtId(x) => x.into(),
- GenericDefId::TraitId(x) => x.into(),
- GenericDefId::TraitAliasId(x) => x.into(),
- GenericDefId::TypeAliasId(x) => x.into(),
- GenericDefId::ImplId(x) => x.into(),
- GenericDefId::EnumVariantId(x) => x.into(),
- GenericDefId::ConstId(x) => x.into(),
+ GenericDefId::FunctionId(it) => it.into(),
+ GenericDefId::AdtId(it) => it.into(),
+ GenericDefId::TraitId(it) => it.into(),
+ GenericDefId::TraitAliasId(it) => it.into(),
+ GenericDefId::TypeAliasId(it) => it.into(),
+ GenericDefId::ImplId(it) => it.into(),
+ GenericDefId::EnumVariantId(it) => it.into(),
+ GenericDefId::ConstId(it) => it.into(),
}
}
}
@@ -716,7 +736,7 @@ impl GeneralConstId {
.const_data(const_id)
.name
.as_ref()
- .and_then(|x| x.as_str())
+ .and_then(|it| it.as_str())
.unwrap_or("_")
.to_owned(),
GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"),
@@ -821,6 +841,8 @@ pub enum AttrDefId {
ImplId(ImplId),
GenericParamId(GenericParamId),
ExternBlockId(ExternBlockId),
+ ExternCrateId(ExternCrateId),
+ UseId(UseId),
}
impl_from!(
@@ -835,7 +857,8 @@ impl_from!(
TypeAliasId,
MacroId(Macro2Id, MacroRulesId, ProcMacroId),
ImplId,
- GenericParamId
+ GenericParamId,
+ ExternCrateId
for AttrDefId
);
@@ -927,6 +950,12 @@ impl HasModule for AdtId {
}
}
+impl HasModule for ExternCrateId {
+ fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
+ self.lookup(db).container
+ }
+}
+
impl HasModule for VariantId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self {
@@ -950,17 +979,17 @@ impl HasModule for MacroId {
impl HasModule for TypeOwnerId {
fn module(&self, db: &dyn db::DefDatabase) -> ModuleId {
match self {
- TypeOwnerId::FunctionId(x) => x.lookup(db).module(db),
- TypeOwnerId::StaticId(x) => x.lookup(db).module(db),
- TypeOwnerId::ConstId(x) => x.lookup(db).module(db),
- TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.module(db),
- TypeOwnerId::AdtId(x) => x.module(db),
- TypeOwnerId::TraitId(x) => x.lookup(db).container,
- TypeOwnerId::TraitAliasId(x) => x.lookup(db).container,
- TypeOwnerId::TypeAliasId(x) => x.lookup(db).module(db),
- TypeOwnerId::ImplId(x) => x.lookup(db).container,
- TypeOwnerId::EnumVariantId(x) => x.parent.lookup(db).container,
- TypeOwnerId::ModuleId(x) => *x,
+ TypeOwnerId::FunctionId(it) => it.lookup(db).module(db),
+ TypeOwnerId::StaticId(it) => it.lookup(db).module(db),
+ TypeOwnerId::ConstId(it) => it.lookup(db).module(db),
+ TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db),
+ TypeOwnerId::AdtId(it) => it.module(db),
+ TypeOwnerId::TraitId(it) => it.lookup(db).container,
+ TypeOwnerId::TraitAliasId(it) => it.lookup(db).container,
+ TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db),
+ TypeOwnerId::ImplId(it) => it.lookup(db).container,
+ TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container,
+ TypeOwnerId::ModuleId(it) => *it,
}
}
}
@@ -1050,6 +1079,8 @@ impl AttrDefId {
.krate
}
AttrDefId::MacroId(it) => it.module(db).krate,
+ AttrDefId::ExternCrateId(it) => it.lookup(db).container.krate,
+ AttrDefId::UseId(it) => it.lookup(db).container.krate,
}
}
}
@@ -1060,7 +1091,7 @@ pub trait AsMacroCall {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Option<MacroCallId> {
self.as_call_id_with_errors(db, krate, resolver).ok()?.value
}
@@ -1069,7 +1100,7 @@ pub trait AsMacroCall {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>;
}
@@ -1078,7 +1109,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
&self,
db: &dyn ExpandDatabase,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
@@ -1089,24 +1120,25 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};
- macro_call_as_call_id_(
+ macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
expands_to,
krate,
resolver,
+ resolver,
)
}
}
/// Helper wrapper for `AstId` with `ModPath`
#[derive(Clone, Debug, Eq, PartialEq)]
-struct AstIdWithPath<T: ast::AstNode> {
+struct AstIdWithPath<T: AstIdNode> {
ast_id: AstId<T>,
path: path::ModPath,
}
-impl<T: ast::AstNode> AstIdWithPath<T> {
+impl<T: AstIdNode> AstIdWithPath<T> {
fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
}
@@ -1117,33 +1149,39 @@ fn macro_call_as_call_id(
call: &AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
- macro_call_as_call_id_(db, call, expand_to, krate, resolver).map(|res| res.value)
+ macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
+ .map(|res| res.value)
}
-fn macro_call_as_call_id_(
+fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
krate: CrateId,
- resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+ resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
+ eager_resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let def =
resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
- let res = if let MacroDefKind::BuiltInEager(..) = def.kind {
- let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
- expand_eager_macro_input(db, krate, macro_call, def, &resolver)?
- } else {
- ExpandResult {
+ let res = match def.kind {
+ MacroDefKind::BuiltInEager(..) => {
+ let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
+ expand_eager_macro_input(db, krate, macro_call, def, &|path| {
+ eager_resolver(path).filter(MacroDefId::is_fn_like)
+ })
+ }
+ _ if def.is_fn_like() => ExpandResult {
value: Some(def.as_lazy_macro(
db,
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
)),
err: None,
- }
+ },
+ _ => return Err(UnresolvedMacro { path: call.path.clone() }),
};
Ok(res)
}
@@ -1228,6 +1266,7 @@ fn derive_macro_as_call_id(
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
let (macro_id, def_id) = resolver(item_attr.path.clone())
+ .filter(|(_, def_id)| def_id.is_derive())
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
let call_id = def_id.as_lazy_macro(
db.upcast(),
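
The new `UseId` and `ExternCrateId` reuse the existing `ItemLoc`-based interning scheme; the `impl_intern!` invocations above generate the usual intern/lookup plumbing. A hypothetical round trip under that assumption (`loc` stands in for a `UseLoc` built during def-map collection):

    let use_id: UseId = db.intern_use(loc);   // method name comes from `impl_intern!`
    let use_loc: UseLoc = use_id.lookup(db);  // round-trips to the ItemLoc
    let module: ModuleId = use_loc.container; // containing module, as in the new HasModule impls
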
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
index af623fd0e..e523c2291 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs
@@ -1,5 +1,9 @@
//! Context for lowering paths.
-use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile};
+use hir_expand::{
+ ast_id_map::{AstIdMap, AstIdNode},
+ hygiene::Hygiene,
+ AstId, HirFileId, InFile,
+};
use once_cell::unsync::OnceCell;
use syntax::ast;
use triomphe::Arc;
@@ -37,7 +41,7 @@ impl<'a> LowerCtx<'a> {
Path::from_src(ast, self)
}
- pub(crate) fn ast_id<N: syntax::AstNode>(&self, item: &N) -> Option<AstId<N>> {
+ pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {
let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?;
let ast_id_map = ast_id_map.get_or_init(|| self.db.ast_id_map(file_id));
Some(InFile::new(file_id, ast_id_map.ast_id(item)))
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
index f41f97190..abd84c6a4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs
@@ -279,6 +279,44 @@ impl < > core::cmp::Eq for Command< > where {}"#]],
}
#[test]
+fn test_partial_eq_expand_with_derive_const() {
+ // FIXME: actually expand with const
+ check(
+ r#"
+//- minicore: derive, eq
+#[derive_const(PartialEq, Eq)]
+enum Command {
+ Move { x: i32, y: i32 },
+ Do(&'static str),
+ Jump,
+}
+"#,
+ expect![[r#"
+#[derive_const(PartialEq, Eq)]
+enum Command {
+ Move { x: i32, y: i32 },
+ Do(&'static str),
+ Jump,
+}
+
+impl < > core::cmp::PartialEq for Command< > where {
+ fn eq(&self , other: &Self ) -> bool {
+ match (self , other) {
+ (Command::Move {
+ x: x_self, y: y_self,
+ }
+ , Command::Move {
+ x: x_other, y: y_other,
+ }
+ )=>x_self.eq(x_other) && y_self.eq(y_other), (Command::Do(f0_self, ), Command::Do(f0_other, ))=>f0_self.eq(f0_other), (Command::Jump, Command::Jump)=>true , _unused=>false
+ }
+ }
+}
+impl < > core::cmp::Eq for Command< > where {}"#]],
+ );
+}
+
+#[test]
fn test_partial_ord_expand() {
check(
r#"
@@ -379,6 +417,44 @@ fn test_hash_expand() {
use core::hash::Hash;
#[derive(Hash)]
+struct Foo {
+ x: i32,
+ y: u64,
+ z: (i32, u64),
+}
+"#,
+ expect![[r#"
+use core::hash::Hash;
+
+#[derive(Hash)]
+struct Foo {
+ x: i32,
+ y: u64,
+ z: (i32, u64),
+}
+
+impl < > core::hash::Hash for Foo< > where {
+ fn hash<H: core::hash::Hasher>(&self , ra_expand_state: &mut H) {
+ match self {
+ Foo {
+ x: x, y: y, z: z,
+ }
+ => {
+ x.hash(ra_expand_state);
+ y.hash(ra_expand_state);
+ z.hash(ra_expand_state);
+ }
+ ,
+ }
+ }
+}"#]],
+ );
+ check(
+ r#"
+//- minicore: derive, hash
+use core::hash::Hash;
+
+#[derive(Hash)]
enum Command {
Move { x: i32, y: i32 },
Do(&'static str),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 07d9baa58..1250cbb74 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -201,7 +201,7 @@ macro_rules! format_args {
}
fn main() {
- ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(arg2), ::core::fmt::Debug::fmt), ]);
+ ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(arg2), ::core::fmt::Debug::fmt), ]);
}
"##]],
);
@@ -235,11 +235,11 @@ macro_rules! format_args {
fn main() {
/* error: no rule matches input tokens */;
- /* error: no rule matches input tokens */;
- /* error: no rule matches input tokens */;
- /* error: no rule matches input tokens */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(), ::core::fmt::Display::fmt), ]);
- /* error: no rule matches input tokens */;
- ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(5), ::core::fmt::Display::fmt), ]);
+ /* error: expected expression */;
+ /* error: expected expression, expected COMMA */;
+ /* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]);
+ /* error: expected expression, expected expression */;
+ ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]);
}
"##]],
);
@@ -267,7 +267,7 @@ macro_rules! format_args {
}
fn main() {
- ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(b), ::core::fmt::Debug::fmt), ]);
+ ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]);
}
"##]],
);
@@ -300,7 +300,7 @@ macro_rules! format_args {
}
fn main() {
- ::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::Argument::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]);
+ ::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::ArgumentV1::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]);
}
"##]],
);
@@ -334,7 +334,7 @@ macro_rules! format_args {
}
fn main() {
- ::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::Argument::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(b), ::core::fmt::Debug::fmt), ]);
+ ::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::ArgumentV1::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]);
}
"##]],
);
@@ -364,8 +364,8 @@ macro_rules! format_args {
fn main() {
let _ =
- /* error: no rule matches input tokens *//* parse error: expected field name or number */
-::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(), ::core::fmt::Debug::fmt), ]);
+ /* error: expected field name or number *//* parse error: expected field name or number */
+::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(), ::core::fmt::Debug::fmt), ]);
}
"##]],
);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 553ffe3d0..2170cadcf 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -98,6 +98,66 @@ fn#19 main#20(#21)#21 {#22
"##]],
);
}
+
+#[test]
+fn eager_expands_with_unresolved_within() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+fn main(foo: ()) {
+ format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+}
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+fn main(foo: ()) {
+ /* error: unresolved macro identity */::core::fmt::Arguments::new_v1(&["", " ", " ", ], &[::core::fmt::ArgumentV1::new(&(::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(0), ::core::fmt::Display::fmt), ])), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(foo), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(identity!(10)), ::core::fmt::Display::fmt), ])
+}
+"##]],
+ );
+}
+
+#[test]
+fn token_mapping_eager() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+macro_rules! identity {
+ ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+ format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+}
+
+"#,
+ expect![[r##"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+macro_rules! identity {
+ ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+ // format_args/*+tokenids*/!("{} {} {}"#1,#3 format_args!("{}", 0#10),#12 foo#13,#14 identity!(10#18),#21 "bar"#22)
+::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 " "#4294967295,#4294967295 " "#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#42949672950#10)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#4294967295foo#13)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#429496729510#18)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295
+}
+
+"##]],
+ );
+}
+
#[test]
fn float_field_access_macro_input() {
check(
@@ -813,6 +873,37 @@ fn foo() {
}
#[test]
+fn test_type_path_is_transcribed_as_expr_path() {
+ check(
+ r#"
+macro_rules! m {
+ ($p:path) => { let $p; }
+}
+fn test() {
+ m!(S)
+ m!(S<i32>)
+ m!(S<S<i32>>)
+ m!(S<{ module::CONST < 42 }>)
+}
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($p:path) => { let $p; }
+}
+fn test() {
+ let S;
+ let S:: <i32> ;
+ let S:: <S:: <i32>> ;
+ let S:: < {
+ module::CONST<42
+ }
+ > ;
+}
+"#]],
+ );
+}
+
+#[test]
fn test_expr() {
check(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index 4a62696df..7a87e61c6 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -20,8 +20,8 @@ use ::mbe::TokenMap;
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
- db::{ExpandDatabase, TokenExpander},
- AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
+ db::{DeclarativeMacroExpander, ExpandDatabase},
+ AstId, InFile, MacroFile,
};
use stdx::format_to;
use syntax::{
@@ -100,32 +100,29 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let call_offset = macro_.syntax().text_range().start().into();
let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
- let kind = MacroDefKind::Declarative(ast_id);
- let macro_def = db
- .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false })
- .unwrap();
- if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
- let tt = match &macro_ {
- ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
- ast::Macro::MacroDef(_) => unimplemented!(""),
- };
+ let DeclarativeMacroExpander { mac, def_site_token_map } =
+ &*db.decl_macro_expander(krate, ast_id);
+ assert_eq!(mac.err(), None);
+ let tt = match &macro_ {
+ ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
+ ast::Macro::MacroDef(_) => unimplemented!(""),
+ };
- let tt_start = tt.syntax().text_range().start();
- tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
- |token| {
- let range = token.text_range().checked_sub(tt_start).unwrap();
- if let Some(id) = def_site_token_map.token_by_range(range) {
- let offset = (range.end() + tt_start).into();
- text_edits.push((offset..offset, format!("#{}", id.0)));
- }
- },
- );
- text_edits.push((
- call_offset..call_offset,
- format!("// call ids will be shifted by {:?}\n", mac.shift()),
- ));
- }
+ let tt_start = tt.syntax().text_range().start();
+ tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
+ |token| {
+ let range = token.text_range().checked_sub(tt_start).unwrap();
+ if let Some(id) = def_site_token_map.token_by_range(range) {
+ let offset = (range.end() + tt_start).into();
+ text_edits.push((offset..offset, format!("#{}", id.0)));
+ }
+ },
+ );
+ text_edits.push((
+ call_offset..call_offset,
+ format!("// call ids will be shifted by {:?}\n", mac.shift()),
+ ));
}
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
@@ -190,7 +187,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let range: Range<usize> = range.into();
if show_token_ids {
- if let Some((tree, map, _)) = arg.as_deref() {
+ if let Some((tree, map, _)) = arg.value.as_deref() {
let tt_range = call.token_tree().unwrap().syntax().text_range();
let mut ranges = Vec::new();
extract_id_ranges(&mut ranges, map, tree);
@@ -239,7 +236,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
- if src.file_id.is_builtin_derive(&db).is_some() {
+ if src.file_id.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
format_to!(expanded_text, "\n{}", pp)
}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
index 0ab1bd849..86818ce26 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -60,7 +60,7 @@ mod tests;
use std::{cmp::Ord, ops::Deref};
use base_db::{CrateId, Edition, FileId, ProcMacroKind};
-use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId};
+use hir_expand::{name::Name, HirFileId, InFile, MacroCallId, MacroDefId};
use itertools::Itertools;
use la_arena::Arena;
use profile::Count;
@@ -196,6 +196,10 @@ impl BlockRelativeModuleId {
fn into_module(self, krate: CrateId) -> ModuleId {
ModuleId { krate, block: self.block, local_id: self.local_id }
}
+
+ fn is_block_module(self) -> bool {
+ self.block.is_some() && self.local_id == DefMap::ROOT
+ }
}
impl std::ops::Index<LocalModuleId> for DefMap {
@@ -278,7 +282,9 @@ pub struct ModuleData {
pub origin: ModuleOrigin,
/// Declared visibility of this module.
pub visibility: Visibility,
- /// Always [`None`] for block modules
+ /// Parent module in the same `DefMap`.
+ ///
+    /// [`None`] for block modules because they are always their `DefMap`'s root.
pub parent: Option<LocalModuleId>,
pub children: FxHashMap<Name, LocalModuleId>,
pub scope: ItemScope,
@@ -626,6 +632,17 @@ impl ModuleData {
self.origin.definition_source(db)
}
+    /// Same as [`definition_source`] but only returns the file id to prevent parsing the AST.
+ pub fn definition_source_file_id(&self) -> HirFileId {
+ match self.origin {
+ ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
+ definition.into()
+ }
+ ModuleOrigin::Inline { definition, .. } => definition.file_id,
+ ModuleOrigin::BlockExpr { block } => block.file_id,
+ }
+ }
+
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
/// `None` for the crate root or block.
pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
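The `definition_source_file_id` helper added above exists so callers that only need the owning file never have to parse the module's AST. A minimal, self-contained sketch of that accessor pattern; `FileId` and `ModuleOrigin` here are simplified stand-ins, not rust-analyzer's real definitions:

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FileId(u32);

#[allow(dead_code)] // not all variants are exercised in this sketch
enum ModuleOrigin {
    CrateRoot { definition: FileId },
    File { definition: FileId },
    Inline { definition_file: FileId },
    BlockExpr { block_file: FileId },
}

impl ModuleOrigin {
    /// Cheap accessor: return the file holding the module's definition without
    /// touching (let alone parsing) the syntax tree itself.
    fn definition_file_id(&self) -> FileId {
        match *self {
            ModuleOrigin::CrateRoot { definition } | ModuleOrigin::File { definition } => {
                definition
            }
            ModuleOrigin::Inline { definition_file } => definition_file,
            ModuleOrigin::BlockExpr { block_file } => block_file,
        }
    }
}

fn main() {
    let origin = ModuleOrigin::Inline { definition_file: FileId(7) };
    assert_eq!(origin.definition_file_id(), FileId(7));
}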
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index 62fb3c788..eef54fc49 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -38,7 +38,7 @@ use crate::{
self, ExternCrate, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode,
MacroCall, MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId,
},
- macro_call_as_call_id, macro_id_to_def_id,
+ macro_call_as_call_id, macro_call_as_call_id_with_eager, macro_id_to_def_id,
nameres::{
diagnostics::DefDiagnostic,
mod_resolution::ModDir,
@@ -52,10 +52,10 @@ use crate::{
tt,
visibility::{RawVisibility, Visibility},
AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId,
- ExternBlockLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId,
- Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId,
- ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc,
- TypeAliasLoc, UnionLoc, UnresolvedMacro,
+ ExternBlockLoc, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId,
+ LocalModuleId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc,
+ ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc,
+ TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, UseLoc,
};
static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
@@ -146,7 +146,7 @@ impl PartialResolvedImport {
#[derive(Clone, Debug, Eq, PartialEq)]
enum ImportSource {
- Import { id: ItemTreeId<item_tree::Import>, use_tree: Idx<ast::UseTree> },
+ Use { id: ItemTreeId<item_tree::Use>, use_tree: Idx<ast::UseTree> },
ExternCrate(ItemTreeId<item_tree::ExternCrate>),
}
@@ -156,10 +156,9 @@ struct Import {
alias: Option<ImportAlias>,
visibility: RawVisibility,
kind: ImportKind,
+ source: ImportSource,
is_prelude: bool,
- is_extern_crate: bool,
is_macro_use: bool,
- source: ImportSource,
}
impl Import {
@@ -167,27 +166,24 @@ impl Import {
db: &dyn DefDatabase,
krate: CrateId,
tree: &ItemTree,
- id: ItemTreeId<item_tree::Import>,
- ) -> Vec<Self> {
+ id: ItemTreeId<item_tree::Use>,
+ mut cb: impl FnMut(Self),
+ ) {
let it = &tree[id.value];
let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
let visibility = &tree[it.visibility];
let is_prelude = attrs.by_key("prelude_import").exists();
-
- let mut res = Vec::new();
it.use_tree.expand(|idx, path, kind, alias| {
- res.push(Self {
+ cb(Self {
path,
alias,
visibility: visibility.clone(),
kind,
is_prelude,
- is_extern_crate: false,
is_macro_use: false,
- source: ImportSource::Import { id, use_tree: idx },
+ source: ImportSource::Use { id, use_tree: idx },
});
});
- res
}
fn from_extern_crate(
@@ -205,7 +201,6 @@ impl Import {
visibility: visibility.clone(),
kind: ImportKind::Plain,
is_prelude: false,
- is_extern_crate: true,
is_macro_use: attrs.by_key("macro_use").exists(),
source: ImportSource::ExternCrate(id),
}
@@ -776,7 +771,7 @@ impl DefCollector<'_> {
let _p = profile::span("resolve_import")
.detail(|| format!("{}", import.path.display(self.db.upcast())));
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
- if import.is_extern_crate {
+ if matches!(import.source, ImportSource::ExternCrate { .. }) {
let name = import
.path
.as_ident()
@@ -813,11 +808,8 @@ impl DefCollector<'_> {
}
}
- // Check whether all namespace is resolved
- if def.take_types().is_some()
- && def.take_values().is_some()
- && def.take_macros().is_some()
- {
+ // Check whether all namespaces are resolved.
+ if def.is_full() {
PartialResolvedImport::Resolved(def)
} else {
PartialResolvedImport::Indeterminate(def)
@@ -826,7 +818,7 @@ impl DefCollector<'_> {
}
fn resolve_extern_crate(&self, name: &Name) -> Option<CrateRootModuleId> {
- if *name == name!(self) {
+ if *name == name![self] {
cov_mark::hit!(extern_crate_self_as);
Some(self.def_map.crate_root())
} else {
@@ -867,7 +859,7 @@ impl DefCollector<'_> {
tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
- if import.is_extern_crate
+ if matches!(import.source, ImportSource::ExternCrate { .. })
&& self.def_map.block.is_none()
&& module_id == DefMap::ROOT
{
@@ -1482,7 +1474,7 @@ impl DefCollector<'_> {
}
for directive in &self.unresolved_imports {
- if let ImportSource::Import { id: import, use_tree } = directive.import.source {
+ if let ImportSource::Use { id: import, use_tree } = directive.import.source {
if matches!(
(directive.import.path.segments().first(), &directive.import.path.kind),
(Some(krate), PathKind::Plain | PathKind::Abs) if diagnosed_extern_crates.contains(krate)
@@ -1584,22 +1576,33 @@ impl ModCollector<'_, '_> {
match item {
ModItem::Mod(m) => self.collect_module(m, &attrs),
- ModItem::Import(import_id) => {
- let imports = Import::from_use(
+ ModItem::Use(import_id) => {
+ let _import_id =
+ UseLoc { container: module, id: ItemTreeId::new(self.tree_id, import_id) }
+ .intern(db);
+ Import::from_use(
db,
krate,
self.item_tree,
ItemTreeId::new(self.tree_id, import_id),
- );
- self.def_collector.unresolved_imports.extend(imports.into_iter().map(
- |import| ImportDirective {
- module_id: self.module_id,
- import,
- status: PartialResolvedImport::Unresolved,
+ |import| {
+ self.def_collector.unresolved_imports.push(ImportDirective {
+ module_id: self.module_id,
+ import,
+ status: PartialResolvedImport::Unresolved,
+ });
},
- ));
+ )
}
ModItem::ExternCrate(import_id) => {
+ let extern_crate_id = ExternCrateLoc {
+ container: module,
+ id: ItemTreeId::new(self.tree_id, import_id),
+ }
+ .intern(db);
+ self.def_collector.def_map.modules[self.module_id]
+ .scope
+ .define_extern_crate_decl(extern_crate_id);
self.def_collector.unresolved_imports.push(ImportDirective {
module_id: self.module_id,
import: Import::from_extern_crate(
@@ -2182,7 +2185,7 @@ impl ModCollector<'_, '_> {
// scopes without eager expansion.
// Case 1: try to resolve macro calls with single-segment name and expand macro_rules
- if let Ok(res) = macro_call_as_call_id(
+ if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
&ast_id,
mac.expand_to,
@@ -2205,19 +2208,34 @@ impl ModCollector<'_, '_> {
.map(|it| macro_id_to_def_id(self.def_collector.db, it))
})
},
- ) {
- // Legacy macros need to be expanded immediately, so that any macros they produce
- // are in scope.
- if let Some(val) = res {
- self.def_collector.collect_macro_expansion(
+ |path| {
+ let resolved_res = self.def_collector.def_map.resolve_path_fp_with_macro(
+ db,
+ ResolveMode::Other,
self.module_id,
- val,
- self.macro_depth + 1,
- container,
+ &path,
+ BuiltinShadowMode::Module,
+ Some(MacroSubNs::Bang),
);
- }
+ resolved_res.resolved_def.take_macros().map(|it| macro_id_to_def_id(db, it))
+ },
+ ) {
+                // FIXME: if there were errors, this might've been in the eager expansion from an
+                // unresolved macro, so we need to push this into late macro resolution. See the FIXME above.
+ if res.err.is_none() {
+ // Legacy macros need to be expanded immediately, so that any macros they produce
+ // are in scope.
+ if let Some(val) = res.value {
+ self.def_collector.collect_macro_expansion(
+ self.module_id,
+ val,
+ self.macro_depth + 1,
+ container,
+ );
+ }
- return;
+ return;
+ }
}
// Case 2: resolve in module scope, expand during name resolution.
@@ -2230,8 +2248,12 @@ impl ModCollector<'_, '_> {
}
fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
- let Some((source, target)) = Self::borrow_modules(self.def_collector.def_map.modules.as_mut(), module_id, self.module_id) else {
- return
+ let Some((source, target)) = Self::borrow_modules(
+ self.def_collector.def_map.modules.as_mut(),
+ module_id,
+ self.module_id,
+ ) else {
+ return;
};
for (name, macs) in source.scope.legacy_macros() {
@@ -2271,7 +2293,7 @@ impl ModCollector<'_, '_> {
fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) {
let ast_id = item.ast_id(self.item_tree);
- let ast_id = InFile::new(self.file_id(), ast_id.upcast());
+ let ast_id = InFile::new(self.file_id(), ast_id.erase());
self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
self.module_id,
ast_id,
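One change worth noting in this hunk: `Import::from_use` now takes a caller-supplied callback instead of building and returning a `Vec`, so the collector pushes each `ImportDirective` directly as the use tree is expanded. A hedged sketch of that shape with illustrative types, not the crate's own:

#[derive(Debug)]
struct Entry {
    path: String,
}

struct UseTree {
    paths: Vec<String>,
}

impl UseTree {
    // Old shape: allocate a Vec and return everything at once.
    fn expand_to_vec(&self) -> Vec<Entry> {
        self.paths.iter().map(|p| Entry { path: p.clone() }).collect()
    }

    // New shape: hand each expanded entry straight to the consumer.
    fn expand_with(&self, mut cb: impl FnMut(Entry)) {
        for p in &self.paths {
            cb(Entry { path: p.clone() });
        }
    }
}

fn main() {
    let tree = UseTree { paths: vec!["std::fmt".into(), "std::io".into()] };

    let collected = tree.expand_to_vec();
    assert_eq!(collected.len(), 2);
    assert_eq!(collected[0].path, "std::fmt");

    let mut directives = Vec::new();
    tree.expand_with(|entry| directives.push(entry));
    assert_eq!(directives.len(), 2);
}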
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
index 18b424255..9cffb3c9f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
@@ -2,12 +2,9 @@
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
-use hir_expand::{attrs::AttrId, MacroCallKind};
+use hir_expand::{attrs::AttrId, ErasedAstId, MacroCallKind};
use la_arena::Idx;
-use syntax::{
- ast::{self, AnyHasAttrs},
- SyntaxError,
-};
+use syntax::{ast, SyntaxError};
use crate::{
item_tree::{self, ItemTreeId},
@@ -22,9 +19,9 @@ pub enum DefDiagnosticKind {
UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
- UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> },
+ UnresolvedImport { id: ItemTreeId<item_tree::Use>, index: Idx<ast::UseTree> },
- UnconfiguredCode { ast: AstId<AnyHasAttrs>, cfg: CfgExpr, opts: CfgOptions },
+ UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions },
UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
@@ -73,7 +70,7 @@ impl DefDiagnostic {
pub(super) fn unresolved_import(
container: LocalModuleId,
- id: ItemTreeId<item_tree::Import>,
+ id: ItemTreeId<item_tree::Use>,
index: Idx<ast::UseTree>,
) -> Self {
Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
@@ -81,7 +78,7 @@ impl DefDiagnostic {
pub fn unconfigured_code(
container: LocalModuleId,
- ast: AstId<ast::AnyHasAttrs>,
+ ast: ErasedAstId,
cfg: CfgExpr,
opts: CfgOptions,
) -> Self {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
index 5f6163175..de22ea101 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs
@@ -12,11 +12,12 @@
use base_db::Edition;
use hir_expand::name::Name;
+use triomphe::Arc;
use crate::{
db::DefDatabase,
item_scope::BUILTIN_SCOPE,
- nameres::{sub_namespace_match, BuiltinShadowMode, DefMap, MacroSubNs},
+ nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs},
path::{ModPath, PathKind},
per_ns::PerNs,
visibility::{RawVisibility, Visibility},
@@ -159,13 +160,15 @@ impl DefMap {
(None, new) => new,
};
- match &current_map.block {
- Some(block) => {
+ match current_map.block {
+ Some(block) if original_module == Self::ROOT => {
+                    // Block modules "inherit" names from their parent module.
original_module = block.parent.local_id;
arc = block.parent.def_map(db, current_map.krate);
- current_map = &*arc;
+ current_map = &arc;
}
- None => return result,
+ // Proper (non-block) modules, including those in block `DefMap`s, don't.
+ _ => return result,
}
}
}
@@ -189,7 +192,7 @@ impl DefMap {
));
let mut segments = path.segments().iter().enumerate();
- let mut curr_per_ns: PerNs = match path.kind {
+ let mut curr_per_ns = match path.kind {
PathKind::DollarCrate(krate) => {
if krate == self.krate {
cov_mark::hit!(macro_dollar_crate_self);
@@ -241,51 +244,54 @@ impl DefMap {
)
}
PathKind::Super(lvl) => {
- let mut module = original_module;
- for i in 0..lvl {
- match self.modules[module].parent {
- Some(it) => module = it,
- None => match &self.block {
- Some(block) => {
- // Look up remaining path in parent `DefMap`
- let new_path = ModPath::from_segments(
- PathKind::Super(lvl - i),
- path.segments().to_vec(),
- );
- tracing::debug!(
- "`super` path: {} -> {} in parent map",
- path.display(db.upcast()),
- new_path.display(db.upcast())
- );
- return block
- .parent
- .def_map(db, self.krate)
- .resolve_path_fp_with_macro(
- db,
- mode,
- block.parent.local_id,
- &new_path,
- shadow,
- expected_macro_subns,
- );
- }
- None => {
- tracing::debug!("super path in root module");
- return ResolvePathResult::empty(ReachedFixedPoint::Yes);
- }
- },
- }
+ let mut local_id = original_module;
+ let mut ext;
+ let mut def_map = self;
+
+ // Adjust `local_id` to `self`, i.e. the nearest non-block module.
+ if def_map.module_id(local_id).is_block_module() {
+ (ext, local_id) = adjust_to_nearest_non_block_module(db, def_map, local_id);
+ def_map = &ext;
}
- // Resolve `self` to the containing crate-rooted module if we're a block
- self.with_ancestor_maps(db, module, &mut |def_map, module| {
- if def_map.block.is_some() {
- None // keep ascending
+ // Go up the module tree but skip block modules as `super` always refers to the
+ // nearest non-block module.
+ for _ in 0..lvl {
+ // Loop invariant: at the beginning of each loop, `local_id` must refer to a
+ // non-block module.
+ if let Some(parent) = def_map.modules[local_id].parent {
+ local_id = parent;
+ if def_map.module_id(local_id).is_block_module() {
+ (ext, local_id) =
+ adjust_to_nearest_non_block_module(db, def_map, local_id);
+ def_map = &ext;
+ }
} else {
- Some(PerNs::types(def_map.module_id(module).into(), Visibility::Public))
+ stdx::always!(def_map.block.is_none());
+ tracing::debug!("super path in root module");
+ return ResolvePathResult::empty(ReachedFixedPoint::Yes);
}
- })
- .expect("block DefMap not rooted in crate DefMap")
+ }
+
+ let module = def_map.module_id(local_id);
+ stdx::never!(module.is_block_module());
+
+ if self.block != def_map.block {
+                    // If we have a different `DefMap` from `self` (the original `DefMap` we started
+ // with), resolve the remaining path segments in that `DefMap`.
+ let path =
+ ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned());
+ return def_map.resolve_path_fp_with_macro(
+ db,
+ mode,
+ local_id,
+ &path,
+ shadow,
+ expected_macro_subns,
+ );
+ }
+
+ PerNs::types(module.into(), Visibility::Public)
}
PathKind::Abs => {
// 2018-style absolute path -- only extern prelude
@@ -508,3 +514,27 @@ impl DefMap {
}
}
}
+
+/// Given a block module, returns its nearest non-block module and the `DefMap` it belongs to.
+fn adjust_to_nearest_non_block_module(
+ db: &dyn DefDatabase,
+ def_map: &DefMap,
+ mut local_id: LocalModuleId,
+) -> (Arc<DefMap>, LocalModuleId) {
+ // INVARIANT: `local_id` in `def_map` must be a block module.
+ stdx::always!(def_map.module_id(local_id).is_block_module());
+
+ let mut ext;
+ // This needs to be a local variable due to our mighty lifetime.
+ let mut def_map = def_map;
+ loop {
+ let BlockInfo { parent, .. } = def_map.block.expect("block module without parent module");
+
+ ext = parent.def_map(db, def_map.krate);
+ def_map = &ext;
+ local_id = parent.local_id;
+ if !parent.is_block_module() {
+ return (ext, local_id);
+ }
+ }
+}
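The rewritten `super` handling above ascends the module tree while treating block modules as transparent, hopping into the parent `DefMap` whenever a block is hit. A toy sketch of the same idea over a flat arena; the types below are stand-ins, not the real `DefMap` structures:

struct Module {
    name: &'static str,
    is_block: bool,
    parent: Option<usize>, // index into the arena
}

/// Starting from `from`, first skip any enclosing block scopes (the "adjust to
/// nearest non-block module" step), then go up `lvl` real modules; returns
/// None when the walk would leave the crate root.
fn resolve_super(arena: &[Module], mut from: usize, lvl: usize) -> Option<usize> {
    while arena[from].is_block {
        from = arena[from].parent?;
    }
    for _ in 0..lvl {
        from = arena[from].parent?;
        // `super` never lands on a block scope, so keep skipping them.
        while arena[from].is_block {
            from = arena[from].parent?;
        }
    }
    Some(from)
}

fn main() {
    // crate_root > outer > { block } > inner
    let arena = vec![
        Module { name: "crate", is_block: false, parent: None },
        Module { name: "outer", is_block: false, parent: Some(0) },
        Module { name: "block", is_block: true, parent: Some(1) },
        Module { name: "inner", is_block: false, parent: Some(2) },
    ];
    assert_eq!(resolve_super(&arena, 3, 1).map(|i| arena[i].name), Some("outer"));
    assert_eq!(resolve_super(&arena, 3, 3), None); // walked past the crate root
}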
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
index 4931c36bb..40d3a1654 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -213,17 +213,17 @@ pub type Ty = ();
for (_, res) in module_data.scope.resolutions() {
match res.values.or(res.types).unwrap().0 {
- ModuleDefId::FunctionId(f) => drop(db.function_data(f)),
+ ModuleDefId::FunctionId(f) => _ = db.function_data(f),
ModuleDefId::AdtId(adt) => match adt {
- AdtId::StructId(it) => drop(db.struct_data(it)),
- AdtId::UnionId(it) => drop(db.union_data(it)),
- AdtId::EnumId(it) => drop(db.enum_data(it)),
+ AdtId::StructId(it) => _ = db.struct_data(it),
+ AdtId::UnionId(it) => _ = db.union_data(it),
+ AdtId::EnumId(it) => _ = db.enum_data(it),
},
- ModuleDefId::ConstId(it) => drop(db.const_data(it)),
- ModuleDefId::StaticId(it) => drop(db.static_data(it)),
- ModuleDefId::TraitId(it) => drop(db.trait_data(it)),
- ModuleDefId::TraitAliasId(it) => drop(db.trait_alias_data(it)),
- ModuleDefId::TypeAliasId(it) => drop(db.type_alias_data(it)),
+ ModuleDefId::ConstId(it) => _ = db.const_data(it),
+ ModuleDefId::StaticId(it) => _ = db.static_data(it),
+ ModuleDefId::TraitId(it) => _ = db.trait_data(it),
+ ModuleDefId::TraitAliasId(it) => _ = db.trait_alias_data(it),
+ ModuleDefId::TypeAliasId(it) => _ = db.type_alias_data(it),
ModuleDefId::EnumVariantId(_)
| ModuleDefId::ModuleId(_)
| ModuleDefId::MacroId(_)
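The test above swaps `drop(db.function_data(f))` for `_ = db.function_data(f)`: both only force the query for its caching side effect, but the underscore assignment states that intent directly and avoids calling `drop` on results that may be `Copy`, which newer toolchains warn about. A minimal illustration:

fn expensive_query() -> usize {
    // stand-in for a query whose result we only compute to warm a cache
    42
}

fn main() {
    drop(expensive_query()); // old style: may trigger a lint when the value is Copy
    _ = expensive_query();   // new style: explicitly discard the result
}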
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
index ff4ae6954..06530cc7e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
@@ -45,7 +45,7 @@ pub enum Path {
/// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>,
},
- /// A link to a lang item. It is used in desugaring of things like `x?`. We can show these
+ /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
/// links via a normal path since they might be private and not accessible in the usage place.
LangItem(LangItemTarget),
}
@@ -135,10 +135,7 @@ impl Path {
pub fn segments(&self) -> PathSegments<'_> {
let Path::Normal { mod_path, generic_args, .. } = self else {
- return PathSegments {
- segments: &[],
- generic_args: None,
- };
+ return PathSegments { segments: &[], generic_args: None };
};
let s =
PathSegments { segments: mod_path.segments(), generic_args: generic_args.as_deref() };
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
index 1cb17ff0d..abd817893 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -74,8 +74,8 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
- Path::from_src(trait_ref.path()?, ctx)? else
- {
+ Path::from_src(trait_ref.path()?, ctx)?
+ else {
return None;
};
let num_segments = mod_path.segments().len();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
index 0aead6f37..11d58a6ba 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
@@ -12,8 +12,8 @@ use crate::{
};
pub(crate) fn print_path(db: &dyn ExpandDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result {
- if let Path::LangItem(x) = path {
- return write!(buf, "$lang_item::{x:?}");
+ if let Path::LangItem(it) = path {
+ return write!(buf, "$lang_item::{it:?}");
}
match path.type_anchor() {
Some(anchor) => {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index 0d6f55411..b112c1070 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -22,10 +22,10 @@ use crate::{
per_ns::PerNs,
visibility::{RawVisibility, Visibility},
AdtId, AssocItemId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId,
- EnumVariantId, ExternBlockId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId,
- ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId,
- ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
- TypeOrConstParamId, TypeOwnerId, TypeParamId, VariantId,
+ EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
+ HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId,
+ MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, UseId, VariantId,
};
#[derive(Debug, Clone)]
@@ -186,12 +186,12 @@ impl Resolver {
Path::LangItem(l) => {
return Some((
match *l {
- LangItemTarget::Union(x) => TypeNs::AdtId(x.into()),
- LangItemTarget::TypeAlias(x) => TypeNs::TypeAliasId(x),
- LangItemTarget::Struct(x) => TypeNs::AdtId(x.into()),
- LangItemTarget::EnumVariant(x) => TypeNs::EnumVariantId(x),
- LangItemTarget::EnumId(x) => TypeNs::AdtId(x.into()),
- LangItemTarget::Trait(x) => TypeNs::TraitId(x),
+ LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
+ LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it),
+ LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()),
+ LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it),
+ LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()),
+ LangItemTarget::Trait(it) => TypeNs::TraitId(it),
LangItemTarget::Function(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None,
@@ -273,10 +273,10 @@ impl Resolver {
Path::Normal { mod_path, .. } => mod_path,
Path::LangItem(l) => {
return Some(ResolveValueResult::ValueNs(match *l {
- LangItemTarget::Function(x) => ValueNs::FunctionId(x),
- LangItemTarget::Static(x) => ValueNs::StaticId(x),
- LangItemTarget::Struct(x) => ValueNs::StructId(x),
- LangItemTarget::EnumVariant(x) => ValueNs::EnumVariantId(x),
+ LangItemTarget::Function(it) => ValueNs::FunctionId(it),
+ LangItemTarget::Static(it) => ValueNs::StaticId(it),
+ LangItemTarget::Struct(it) => ValueNs::StructId(it),
+ LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it),
LangItemTarget::Union(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::TypeAlias(_)
@@ -425,14 +425,14 @@ impl Resolver {
/// The shadowing is accounted for: in
///
/// ```
- /// let x = 92;
+ /// let it = 92;
/// {
- /// let x = 92;
+ /// let it = 92;
/// $0
/// }
/// ```
///
- /// there will be only one entry for `x` in the result.
+ /// there will be only one entry for `it` in the result.
///
/// The result is ordered *roughly* from the innermost scope to the
/// outermost: when the name is introduced in two namespaces in two scopes,
@@ -1018,20 +1018,32 @@ impl HasResolver for ExternBlockId {
}
}
+impl HasResolver for ExternCrateId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
+impl HasResolver for UseId {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ self.lookup(db).container.resolver(db)
+ }
+}
+
impl HasResolver for TypeOwnerId {
fn resolver(self, db: &dyn DefDatabase) -> Resolver {
match self {
- TypeOwnerId::FunctionId(x) => x.resolver(db),
- TypeOwnerId::StaticId(x) => x.resolver(db),
- TypeOwnerId::ConstId(x) => x.resolver(db),
- TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.resolver(db),
- TypeOwnerId::AdtId(x) => x.resolver(db),
- TypeOwnerId::TraitId(x) => x.resolver(db),
- TypeOwnerId::TraitAliasId(x) => x.resolver(db),
- TypeOwnerId::TypeAliasId(x) => x.resolver(db),
- TypeOwnerId::ImplId(x) => x.resolver(db),
- TypeOwnerId::EnumVariantId(x) => x.resolver(db),
- TypeOwnerId::ModuleId(x) => x.resolver(db),
+ TypeOwnerId::FunctionId(it) => it.resolver(db),
+ TypeOwnerId::StaticId(it) => it.resolver(db),
+ TypeOwnerId::ConstId(it) => it.resolver(db),
+ TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.resolver(db),
+ TypeOwnerId::AdtId(it) => it.resolver(db),
+ TypeOwnerId::TraitId(it) => it.resolver(db),
+ TypeOwnerId::TraitAliasId(it) => it.resolver(db),
+ TypeOwnerId::TypeAliasId(it) => it.resolver(db),
+ TypeOwnerId::ImplId(it) => it.resolver(db),
+ TypeOwnerId::EnumVariantId(it) => it.resolver(db),
+ TypeOwnerId::ModuleId(it) => it.resolver(db),
}
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 40d8659f2..1f27204c1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -16,11 +16,9 @@ cov-mark = "2.0.0-pre.1"
tracing = "0.1.35"
either = "1.7.0"
rustc-hash = "1.1.0"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
itertools = "0.10.5"
-hashbrown = { version = "0.12.1", features = [
- "inline-more",
-], default-features = false }
+hashbrown.workspace = true
smallvec.workspace = true
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
index c2b0d5985..1906ed15b 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs
@@ -18,47 +18,89 @@ use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
/// `AstId` points to an AST node in a specific file.
-pub struct FileAstId<N: AstNode> {
+pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
covariant: PhantomData<fn() -> N>,
}
-impl<N: AstNode> Clone for FileAstId<N> {
+impl<N: AstIdNode> Clone for FileAstId<N> {
fn clone(&self) -> FileAstId<N> {
*self
}
}
-impl<N: AstNode> Copy for FileAstId<N> {}
+impl<N: AstIdNode> Copy for FileAstId<N> {}
-impl<N: AstNode> PartialEq for FileAstId<N> {
+impl<N: AstIdNode> PartialEq for FileAstId<N> {
fn eq(&self, other: &Self) -> bool {
self.raw == other.raw
}
}
-impl<N: AstNode> Eq for FileAstId<N> {}
-impl<N: AstNode> Hash for FileAstId<N> {
+impl<N: AstIdNode> Eq for FileAstId<N> {}
+impl<N: AstIdNode> Hash for FileAstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.raw.hash(hasher);
}
}
-impl<N: AstNode> fmt::Debug for FileAstId<N> {
+impl<N: AstIdNode> fmt::Debug for FileAstId<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
}
}
-impl<N: AstNode> FileAstId<N> {
+impl<N: AstIdNode> FileAstId<N> {
// Can't make this a From implementation because of coherence
- pub fn upcast<M: AstNode>(self) -> FileAstId<M>
+ pub fn upcast<M: AstIdNode>(self) -> FileAstId<M>
where
N: Into<M>,
{
FileAstId { raw: self.raw, covariant: PhantomData }
}
+
+ pub fn erase(self) -> ErasedFileAstId {
+ self.raw
+ }
}
-type ErasedFileAstId = Idx<SyntaxNodePtr>;
+pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
+
+pub trait AstIdNode: AstNode {}
+macro_rules! register_ast_id_node {
+ (impl AstIdNode for $($ident:ident),+ ) => {
+ $(
+ impl AstIdNode for ast::$ident {}
+ )+
+ fn should_alloc_id(kind: syntax::SyntaxKind) -> bool {
+ $(
+ ast::$ident::can_cast(kind)
+ )||+
+ }
+ };
+}
+register_ast_id_node! {
+ impl AstIdNode for
+ Item,
+ Adt,
+ Enum,
+ Struct,
+ Union,
+ Const,
+ ExternBlock,
+ ExternCrate,
+ Fn,
+ Impl,
+ Macro,
+ MacroDef,
+ MacroRules,
+ MacroCall,
+ Module,
+ Static,
+ Trait,
+ TraitAlias,
+ TypeAlias,
+ Use,
+ AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg
+}
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Default)]
@@ -92,14 +134,7 @@ impl AstIdMap {
// change parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bdfs(node, |it| {
- let kind = it.kind();
- if ast::Item::can_cast(kind)
- || ast::BlockExpr::can_cast(kind)
- || ast::Variant::can_cast(kind)
- || ast::RecordField::can_cast(kind)
- || ast::TupleField::can_cast(kind)
- || ast::ConstArg::can_cast(kind)
- {
+ if should_alloc_id(it.kind()) {
res.alloc(&it);
true
} else {
@@ -120,15 +155,19 @@ impl AstIdMap {
res
}
- pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
+ pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
}
- pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
+ pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
+ pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
+ self.arena[id].clone()
+ }
+
fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
let ptr = SyntaxNodePtr::new(item);
let hash = hash_ptr(&ptr);
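`register_ast_id_node!` above keeps two things in lockstep from one list: the `AstIdNode` impls and the `should_alloc_id` predicate used while building the map. A small hedged sketch of that macro pattern with illustrative names (`Registered`, `is_registered`) rather than the crate's own:

trait Registered {
    const KIND: &'static str;
}

macro_rules! register {
    (impl Registered for $($ty:ident),+ $(,)?) => {
        $(
            impl Registered for $ty {
                const KIND: &'static str = stringify!($ty);
            }
        )+
        /// True if `kind` names any registered type; generated from the same list.
        fn is_registered(kind: &str) -> bool {
            $( kind == stringify!($ty) )||+
        }
    };
}

struct Fn_;
struct Struct_;
struct Enum_;

register! {
    impl Registered for Fn_, Struct_, Enum_
}

fn main() {
    // Construct the unit structs so the sketch compiles warning-free.
    let _ = (Fn_, Struct_, Enum_);
    assert!(is_registered("Struct_"));
    assert!(!is_registered("Union_"));
    assert_eq!(Fn_::KIND, "Fn_");
}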
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
index 80695bc06..4ee12e2f2 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -35,7 +35,7 @@ macro_rules! register_builtin {
impl BuiltinAttrExpander {
pub fn is_derive(self) -> bool {
- matches!(self, BuiltinAttrExpander::Derive)
+ matches!(self, BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst)
}
pub fn is_test(self) -> bool {
matches!(self, BuiltinAttrExpander::Test)
@@ -50,6 +50,8 @@ register_builtin! {
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
(derive, Derive) => derive_attr_expand,
+    // derive const is equivalent to derive for our purposes.
+ (derive_const, DeriveConst) => derive_attr_expand,
(global_allocator, GlobalAllocator) => dummy_attr_expand,
(test, Test) => dummy_attr_expand,
(test_case, TestCase) => dummy_attr_expand
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
index 3d1e272b9..ecc8b407a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -12,9 +12,7 @@ use crate::{
name::{AsName, Name},
tt::{self, TokenId},
};
-use syntax::ast::{
- self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds,
-};
+use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
@@ -30,12 +28,13 @@ macro_rules! register_builtin {
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ token_map: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
};
- expander(db, id, tt)
+ expander(db, id, tt, token_map)
}
fn find_by_name(name: &name::Name) -> Option<Self> {
@@ -72,12 +71,12 @@ enum VariantShape {
}
fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
- (0..n).map(|x| Ident::new(format!("f{x}"), tt::TokenId::unspecified()))
+ (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
}
impl VariantShape {
fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
- self.as_pattern_map(path, |x| quote!(#x))
+ self.as_pattern_map(path, |it| quote!(#it))
}
fn field_names(&self) -> Vec<tt::Ident> {
@@ -95,17 +94,17 @@ impl VariantShape {
) -> tt::Subtree {
match self {
VariantShape::Struct(fields) => {
- let fields = fields.iter().map(|x| {
- let mapped = field_map(x);
- quote! { #x : #mapped , }
+ let fields = fields.iter().map(|it| {
+ let mapped = field_map(it);
+ quote! { #it : #mapped , }
});
quote! {
#path { ##fields }
}
}
&VariantShape::Tuple(n) => {
- let fields = tuple_field_iterator(n).map(|x| {
- let mapped = field_map(&x);
+ let fields = tuple_field_iterator(n).map(|it| {
+ let mapped = field_map(&it);
quote! {
#mapped ,
}
@@ -118,16 +117,16 @@ impl VariantShape {
}
}
- fn from(value: Option<FieldList>, token_map: &TokenMap) -> Result<Self, ExpandError> {
+ fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
- Some(FieldList::RecordFieldList(x)) => VariantShape::Struct(
- x.fields()
- .map(|x| x.name())
- .map(|x| name_to_token(token_map, x))
+ Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
+ it.fields()
+ .map(|it| it.name())
+ .map(|it| name_to_token(tm, it))
.collect::<Result<_, _>>()?,
),
- Some(FieldList::TupleFieldList(x)) => VariantShape::Tuple(x.fields().count()),
+ Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()),
};
Ok(r)
}
@@ -141,7 +140,7 @@ enum AdtShape {
impl AdtShape {
fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
- self.as_pattern_map(name, |x| quote!(#x))
+ self.as_pattern_map(name, |it| quote!(#it))
}
fn field_names(&self) -> Vec<Vec<tt::Ident>> {
@@ -190,32 +189,19 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
-fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
- let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
- let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
- debug!("derive node didn't parse");
- ExpandError::other("invalid item definition")
- })?;
- let item = macro_items.items().next().ok_or_else(|| {
- debug!("no module item parsed");
- ExpandError::other("no item found")
- })?;
- let adt = ast::Adt::cast(item.syntax().clone()).ok_or_else(|| {
- debug!("expected adt, found: {:?}", item);
- ExpandError::other("expected struct, enum or union")
- })?;
+fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match &adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
- AdtShape::Struct(VariantShape::from(it.field_list(), &token_map)?),
+ AdtShape::Struct(VariantShape::from(tm, it.field_list())?),
),
ast::Adt::Enum(it) => {
let default_variant = it
.variant_list()
.into_iter()
- .flat_map(|x| x.variants())
- .position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into())));
+ .flat_map(|it| it.variants())
+ .position(|it| it.attrs().any(|it| it.simple_name() == Some("default".into())));
(
it.name(),
it.generic_param_list(),
@@ -224,11 +210,11 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
variants: it
.variant_list()
.into_iter()
- .flat_map(|x| x.variants())
- .map(|x| {
+ .flat_map(|it| it.variants())
+ .map(|it| {
Ok((
- name_to_token(&token_map, x.name())?,
- VariantShape::from(x.field_list(), &token_map)?,
+ name_to_token(tm, it.name())?,
+ VariantShape::from(tm, it.field_list())?,
))
})
.collect::<Result<_, ExpandError>>()?,
@@ -246,16 +232,16 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
let name = {
let this = param.name();
match this {
- Some(x) => {
- param_type_set.insert(x.as_name());
- mbe::syntax_node_to_token_tree(x.syntax()).0
+ Some(it) => {
+ param_type_set.insert(it.as_name());
+ mbe::syntax_node_to_token_tree(it.syntax()).0
}
None => tt::Subtree::empty(),
}
};
let bounds = match &param {
- ast::TypeOrConstParam::Type(x) => {
- x.type_bound_list().map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+ ast::TypeOrConstParam::Type(it) => {
+ it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
}
ast::TypeOrConstParam::Const(_) => None,
};
@@ -296,9 +282,9 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
- .map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0)
+ .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
.collect();
- let name_token = name_to_token(&token_map, name)?;
+ let name_token = name_to_token(&tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
@@ -345,11 +331,12 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
- let info = match parse_adt(tt) {
+ let info = match parse_adt(tm, tt) {
Ok(info) => info,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
};
@@ -373,10 +360,10 @@ fn expand_simple_derive(
})
.unzip();
- where_block.extend(info.associated_types.iter().map(|x| {
- let x = x.clone();
+ where_block.extend(info.associated_types.iter().map(|it| {
+ let it = it.clone();
let bound = trait_path.clone();
- quote! { #x : #bound , }
+ quote! { #it : #bound , }
}));
let name = info.name;
@@ -405,19 +392,21 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::marker::Copy }, |_| quote! {})
+ expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
}
fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::clone::Clone }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct {
char: '*',
@@ -444,7 +433,7 @@ fn clone_expand(
}
let name = &adt.name;
let patterns = adt.shape.as_pattern(name);
- let exprs = adt.shape.as_pattern_map(name, |x| quote! { #x .clone() });
+ let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
let fat_arrow = fat_arrow();
quote! {
@@ -479,10 +468,11 @@ fn and_and() -> ::tt::Subtree<TokenId> {
fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::default::Default }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
@@ -518,16 +508,17 @@ fn default_expand(
fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::fmt::Debug }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
- let for_fields = fields.iter().map(|x| {
- let x_string = x.to_string();
+ let for_fields = fields.iter().map(|it| {
+ let x_string = it.to_string();
quote! {
- .field(#x_string, & #x)
+ .field(#x_string, & #it)
}
});
quote! {
@@ -535,9 +526,9 @@ fn debug_expand(
}
}
VariantShape::Tuple(n) => {
- let for_fields = tuple_field_iterator(*n).map(|x| {
+ let for_fields = tuple_field_iterator(*n).map(|it| {
quote! {
- .field( & #x)
+ .field( & #it)
}
});
quote! {
@@ -598,10 +589,11 @@ fn debug_expand(
fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::hash::Hash }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
@@ -621,7 +613,7 @@ fn hash_expand(
let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
|(pat, names)| {
let expr = {
- let it = names.iter().map(|x| quote! { #x . hash(ra_expand_state); });
+ let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
quote! { {
##it
} }
@@ -632,9 +624,14 @@ fn hash_expand(
}
},
);
+ let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
+ quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
+ } else {
+ quote! {}
+ };
quote! {
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
- #krate::mem::discriminant(self).hash(ra_expand_state);
+ #check_discriminant
match self {
##arms
}
@@ -646,19 +643,21 @@ fn hash_expand(
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::Eq }, |_| quote! {})
+ expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
}
fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::PartialEq }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
@@ -674,9 +673,9 @@ fn partial_eq_expand(
quote!(true)
}
[first, rest @ ..] => {
- let rest = rest.iter().map(|x| {
- let t1 = Ident::new(format!("{}_self", x.text), x.span);
- let t2 = Ident::new(format!("{}_other", x.text), x.span);
+ let rest = rest.iter().map(|it| {
+ let t1 = Ident::new(format!("{}_self", it.text), it.span);
+ let t2 = Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and();
quote!(#and_and #t1 .eq( #t2 ))
});
@@ -708,12 +707,12 @@ fn self_and_other_patterns(
adt: &BasicAdtInfo,
name: &tt::Ident,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
- let self_patterns = adt.shape.as_pattern_map(name, |x| {
- let t = Ident::new(format!("{}_self", x.text), x.span);
+ let self_patterns = adt.shape.as_pattern_map(name, |it| {
+ let t = Ident::new(format!("{}_self", it.text), it.span);
quote!(#t)
});
- let other_patterns = adt.shape.as_pattern_map(name, |x| {
- let t = Ident::new(format!("{}_other", x.text), x.span);
+ let other_patterns = adt.shape.as_pattern_map(name, |it| {
+ let t = Ident::new(format!("{}_other", it.text), it.span);
quote!(#t)
});
(self_patterns, other_patterns)
@@ -722,10 +721,11 @@ fn self_and_other_patterns(
fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::Ord }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
@@ -747,9 +747,6 @@ fn ord_expand(
// FIXME: Return expand error here
return quote!();
}
- let left = quote!(#krate::intrinsics::discriminant_value(self));
- let right = quote!(#krate::intrinsics::discriminant_value(other));
-
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
|(pat1, pat2, fields)| {
@@ -764,17 +761,17 @@ fn ord_expand(
},
);
let fat_arrow = fat_arrow();
- let body = compare(
- krate,
- left,
- right,
- quote! {
- match (self, other) {
- ##arms
- _unused #fat_arrow #krate::cmp::Ordering::Equal
- }
- },
- );
+ let mut body = quote! {
+ match (self, other) {
+ ##arms
+ _unused #fat_arrow #krate::cmp::Ordering::Equal
+ }
+ };
+ if matches!(&adt.shape, AdtShape::Enum { .. }) {
+ let left = quote!(#krate::intrinsics::discriminant_value(self));
+ let right = quote!(#krate::intrinsics::discriminant_value(other));
+ body = compare(krate, left, right, body);
+ }
quote! {
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body
@@ -786,10 +783,11 @@ fn ord_expand(
fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
- tt: &tt::Subtree,
+ tt: &ast::Adt,
+ tm: &TokenMap,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
- expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd }, |adt| {
+ expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
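The thread running through this file is that derive expanders no longer re-parse their raw token input; the caller parses the annotated item once and hands each expander the already-parsed ADT plus its token map. A toy sketch of that parse-once refactor, using strings in place of token trees and syntax nodes:

struct ParsedAdt {
    name: String,
}

fn parse(raw: &str) -> Result<ParsedAdt, String> {
    raw.strip_prefix("struct ")
        .map(|rest| ParsedAdt { name: rest.trim_end_matches(';').to_string() })
        .ok_or_else(|| "expected struct, enum or union".to_string())
}

// Old shape: each expander parses its own input, repeated per derive.
fn expand_old(raw: &str) -> Result<String, String> {
    let adt = parse(raw)?;
    Ok(format!("impl Old for {} {{}}", adt.name))
}

// New shape: the caller parses once and passes the parsed ADT along.
fn expand_new(adt: &ParsedAdt) -> String {
    format!("impl New for {} {{}}", adt.name)
}

fn main() {
    let raw = "struct Foo;";
    assert_eq!(expand_old(raw).unwrap(), "impl Old for Foo {}");

    let adt = parse(raw).unwrap(); // done once, shared by every expander
    assert_eq!(expand_new(&adt), "impl New for Foo {}");
}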
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index a9f0c154b..95c6baf42 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -339,7 +339,7 @@ fn format_args_expand_general(
parts.push(mem::take(&mut last_part));
let arg_tree = if argument.is_empty() {
match args.next() {
- Some(x) => x,
+ Some(it) => it,
None => {
err = Some(mbe::ExpandError::NoMatchingRule.into());
tt::Subtree::empty()
@@ -361,7 +361,7 @@ fn format_args_expand_general(
quote!(::core::fmt::Display::fmt)
}
};
- arg_tts.push(quote! { ::core::fmt::Argument::new(&(#arg_tree), #formatter), });
+ arg_tts.push(quote! { ::core::fmt::ArgumentV1::new(&(#arg_tree), #formatter), });
}
'}' => {
if format_iter.peek() == Some(&'}') {
@@ -378,11 +378,11 @@ fn format_args_expand_general(
if !last_part.is_empty() {
parts.push(last_part);
}
- let part_tts = parts.into_iter().map(|x| {
+ let part_tts = parts.into_iter().map(|it| {
let text = if let Some(raw) = &raw_sharps {
- format!("r{raw}\"{}\"{raw}", x).into()
+ format!("r{raw}\"{}\"{raw}", it).into()
} else {
- format!("\"{}\"", x).into()
+ format!("\"{}\"", it).into()
};
let l = tt::Literal { span: tt::TokenId::unspecified(), text };
quote!(#l ,)
@@ -574,7 +574,7 @@ fn concat_bytes_expand(
syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
syntax::SyntaxKind::BYTE_STRING => {
let components = unquote_byte_string(lit).unwrap_or_default();
- components.into_iter().for_each(|x| bytes.push(x.to_string()));
+ components.into_iter().for_each(|it| bytes.push(it.to_string()));
}
_ => {
err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
@@ -692,7 +692,7 @@ pub(crate) fn include_arg_to_tt(
arg_id: MacroCallId,
) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
let loc = db.lookup_intern_macro_call(arg_id);
- let Some(EagerCallInfo {arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else {
+    let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
};
let path = parse_string(&arg.0)?;
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 78b2db730..5292a5fa1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,9 +1,9 @@
//! Defines database & queries for macro expansion.
-use base_db::{salsa, Edition, SourceDatabase};
+use base_db::{salsa, CrateId, Edition, SourceDatabase};
use either::Either;
use limit::Limit;
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, HasAttrs, HasDocComments},
@@ -13,7 +13,7 @@ use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
- builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander,
+ builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFile, ProcMacroExpander,
@@ -28,61 +28,67 @@ use crate::{
static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
+/// Old-style `macro_rules` or the new macros 2.0
+pub struct DeclarativeMacroExpander {
+ pub mac: mbe::DeclarativeMacro,
+ pub def_site_token_map: mbe::TokenMap,
+}
+
+impl DeclarativeMacroExpander {
+ pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
+ match self.mac.err() {
+ Some(e) => ExpandResult::new(
+ tt::Subtree::empty(),
+ ExpandError::other(format!("invalid macro definition: {e}")),
+ ),
+ None => self.mac.expand(tt).map_err(Into::into),
+ }
+ }
+
+ pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
+ self.mac.map_id_down(token_id)
+ }
+
+ pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+ self.mac.map_id_up(token_id)
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
- /// Old-style `macro_rules` or the new macros 2.0
- DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap },
+ DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
- Builtin(BuiltinFnLikeExpander),
+ BuiltIn(BuiltinFnLikeExpander),
/// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.)
- BuiltinEager(EagerExpander),
+ BuiltInEager(EagerExpander),
/// `global_allocator` and such.
- BuiltinAttr(BuiltinAttrExpander),
+ BuiltInAttr(BuiltinAttrExpander),
/// `derive(Copy)` and such.
- BuiltinDerive(BuiltinDeriveExpander),
+ BuiltInDerive(BuiltinDeriveExpander),
/// The thing we love the most here in rust-analyzer -- procedural macros.
ProcMacro(ProcMacroExpander),
}
+// FIXME: Get rid of these methods
impl TokenExpander {
- fn expand(
- &self,
- db: &dyn ExpandDatabase,
- id: MacroCallId,
- tt: &tt::Subtree,
- ) -> ExpandResult<tt::Subtree> {
- match self {
- TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
- TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
- TokenExpander::BuiltinEager(it) => it.expand(db, id, tt).map_err(Into::into),
- TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
- TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
- TokenExpander::ProcMacro(_) => {
- // We store the result in salsa db to prevent non-deterministic behavior in
- // some proc-macro implementation
- // See #4315 for details
- db.expand_proc_macro(id)
- }
- }
- }
-
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
- TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id),
- TokenExpander::Builtin(..)
- | TokenExpander::BuiltinEager(..)
- | TokenExpander::BuiltinAttr(..)
- | TokenExpander::BuiltinDerive(..)
+ TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
+ TokenExpander::BuiltIn(..)
+ | TokenExpander::BuiltInEager(..)
+ | TokenExpander::BuiltInAttr(..)
+ | TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
- TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id),
- TokenExpander::Builtin(..)
- | TokenExpander::BuiltinEager(..)
- | TokenExpander::BuiltinAttr(..)
- | TokenExpander::BuiltinDerive(..)
+ TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
+ TokenExpander::BuiltIn(..)
+ | TokenExpander::BuiltInEager(..)
+ | TokenExpander::BuiltInAttr(..)
+ | TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
@@ -118,14 +124,26 @@ pub trait ExpandDatabase: SourceDatabase {
fn macro_arg(
&self,
id: MacroCallId,
- ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+ ) -> ValueResult<
+ Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+ Arc<Box<[SyntaxError]>>,
+ >;
/// Extracts syntax node, corresponding to a macro call. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
- fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
- /// Gets the expander for this macro. This compiles declarative macros, and
- /// just fetches procedural ones.
- fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>;
+ fn macro_arg_node(
+ &self,
+ id: MacroCallId,
+ ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+ /// Fetches the expander for this macro.
+ #[salsa::transparent]
+ fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
+ /// Fetches (and compiles) the expander of this decl macro.
+ fn decl_macro_expander(
+ &self,
+ def_crate: CrateId,
+ id: AstId<ast::Macro>,
+ ) -> Arc<DeclarativeMacroExpander>;
/// Expand macro call to a token tree.
// This query is LRU cached
@@ -141,8 +159,8 @@ pub trait ExpandDatabase: SourceDatabase {
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
- /// @edwin0cheng heroically debugged this once!
- fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
+ /// @edwin0cheng heroically debugged this once! See #4315 for details
+ fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
/// Firewall query that returns the errors from the `parse_macro_expansion` query.
fn parse_macro_expansion_error(
&self,
@@ -163,7 +181,6 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
- let macro_def = db.macro_def(loc.def).ok()?;
let token_range = token_to_map.text_range();
// Build the subtree and token mapping for the speculative args
@@ -221,7 +238,12 @@ pub fn expand_speculative(
None => {
let range = token_range.checked_sub(speculative_args.text_range().start())?;
let token_id = spec_args_tmap.token_by_range(range)?;
- macro_def.map_id_down(token_id)
+ match loc.def.kind {
+ MacroDefKind::Declarative(it) => {
+ db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
+ }
+ _ => token_id,
+ }
}
};
@@ -235,7 +257,17 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
}
- _ => macro_def.expand(db, actual_macro_call, &tt),
+ MacroDefKind::BuiltInDerive(expander, ..) => {
+ // this cast is a bit sus, can we avoid losing the typedness here?
+ let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
+ expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
+ }
+ MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
+ MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
+ MacroDefKind::BuiltInEager(it, _) => {
+ it.expand(db, actual_macro_call, &tt).map_err(Into::into)
+ }
+ MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
};
let expand_to = macro_expand_to(db, actual_macro_call);
@@ -297,17 +329,31 @@ fn parse_macro_expansion(
ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
}
+fn parse_macro_expansion_error(
+ db: &dyn ExpandDatabase,
+ macro_call_id: MacroCallId,
+) -> ExpandResult<Box<[SyntaxError]>> {
+ db.parse_macro_expansion(MacroFile { macro_call_id })
+ .map(|it| it.0.errors().to_vec().into_boxed_slice())
+}
+
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
-) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+) -> ValueResult<
+ Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+ Arc<Box<[SyntaxError]>>,
+> {
let loc = db.lookup_intern_macro_call(id);
- if let Some(EagerCallInfo { arg, arg_id: Some(_), error: _ }) = loc.eager.as_deref() {
- return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())));
+ if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
+ return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
}
- let arg = db.macro_arg_text(id)?;
+ let ValueResult { value, err } = db.macro_arg_node(id);
+ let Some(arg) = value else {
+ return ValueResult { value: None, err };
+ };
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
@@ -325,9 +371,16 @@ fn macro_arg(
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = tt::Delimiter::unspecified();
}
- Some(Arc::new((tt, tmap, fixups.undo_info)))
+ let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
+ match err {
+ Some(err) => ValueResult::new(val, err),
+ None => ValueResult::ok(val),
+ }
}
+/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
+/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
+/// - attributes expect the invoking attribute to be stripped
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
// FIXME: handle `cfg_attr`
(|| {
@@ -364,9 +417,43 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
-fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
+fn macro_arg_node(
+ db: &dyn ExpandDatabase,
+ id: MacroCallId,
+) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
+ let err = || -> Arc<Box<[_]>> {
+ Arc::new(Box::new([SyntaxError::new_at_offset(
+ "invalid macro call".to_owned(),
+ syntax::TextSize::from(0),
+ )]))
+ };
let loc = db.lookup_intern_macro_call(id);
- let arg = loc.kind.arg(db)?;
+ let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
+ let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
+ Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
+ } else {
+ loc.kind
+ .arg(db)
+ .and_then(|arg| ast::TokenTree::cast(arg.value))
+ .map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
+ };
+ match res {
+ Some(res) if res.errors().is_empty() => res.syntax_node(),
+ Some(res) => {
+ return ValueResult::new(
+ Some(res.syntax_node().green().into()),
+ // Box::<[_]>::from(res.errors()), not stable yet
+ Arc::new(res.errors().to_vec().into_boxed_slice()),
+ );
+ }
+ None => return ValueResult::only_err(err()),
+ }
+ } else {
+ match loc.kind.arg(db) {
+ Some(res) => res.value,
+ None => return ValueResult::only_err(err()),
+ }
+ };
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
@@ -381,101 +468,146 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode>
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
- return None;
+ return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
+ "unbalanced token tree".to_owned(),
+ arg.text_range(),
+ )])));
}
}
- if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
- Some(
- mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr)
- .0
- .syntax_node()
- .green()
- .into(),
- )
- } else {
- Some(arg.green().into())
- }
+ ValueResult::ok(Some(arg.green().into()))
}
-fn macro_def(
+fn decl_macro_expander(
db: &dyn ExpandDatabase,
- id: MacroDefId,
-) -> Result<Arc<TokenExpander>, mbe::ParseError> {
+ def_crate: CrateId,
+ id: AstId<ast::Macro>,
+) -> Arc<DeclarativeMacroExpander> {
+ let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
+ let (mac, def_site_token_map) = match id.to_node(db) {
+ ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
+ Some(arg) => {
+ let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+ let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
+ (mac, def_site_token_map)
+ }
+ None => (
+ mbe::DeclarativeMacro::from_err(
+ mbe::ParseError::Expected("expected a token tree".into()),
+ is_2021,
+ ),
+ Default::default(),
+ ),
+ },
+ ast::Macro::MacroDef(macro_def) => match macro_def.body() {
+ Some(arg) => {
+ let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
+ let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
+ (mac, def_site_token_map)
+ }
+ None => (
+ mbe::DeclarativeMacro::from_err(
+ mbe::ParseError::Expected("expected a token tree".into()),
+ is_2021,
+ ),
+ Default::default(),
+ ),
+ },
+ };
+ Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
+}
+
+fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
match id.kind {
MacroDefKind::Declarative(ast_id) => {
- let is_2021 = db.crate_graph()[id.krate].edition >= Edition::Edition2021;
- let (mac, def_site_token_map) = match ast_id.to_node(db) {
- ast::Macro::MacroRules(macro_rules) => {
- let arg = macro_rules
- .token_tree()
- .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021)?;
- (mac, def_site_token_map)
- }
- ast::Macro::MacroDef(macro_def) => {
- let arg = macro_def
- .body()
- .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?;
- let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
- let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021)?;
- (mac, def_site_token_map)
- }
- };
- Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map }))
- }
- MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))),
- MacroDefKind::BuiltInAttr(expander, _) => {
- Ok(Arc::new(TokenExpander::BuiltinAttr(expander)))
+ TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
}
- MacroDefKind::BuiltInDerive(expander, _) => {
- Ok(Arc::new(TokenExpander::BuiltinDerive(expander)))
- }
- MacroDefKind::BuiltInEager(expander, ..) => {
- Ok(Arc::new(TokenExpander::BuiltinEager(expander)))
- }
- MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))),
+ MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander),
+ MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander),
+ MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander),
+ MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander),
+ MacroDefKind::ProcMacro(expander, ..) => TokenExpander::ProcMacro(expander),
}
}
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
let loc = db.lookup_intern_macro_call(id);
- if let Some(EagerCallInfo { arg, arg_id: None, error }) = loc.eager.as_deref() {
- // This is an input expansion for an eager macro. These are already pre-expanded
- return ExpandResult { value: Arc::new(arg.0.clone()), err: error.clone() };
- }
- let expander = match db.macro_def(loc.def) {
- Ok(it) => it,
- // FIXME: We should make sure to enforce a variant that invalid macro
- // definitions do not get expanders that could reach this call path!
- Err(err) => {
- return ExpandResult {
- value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: vec![],
- }),
- err: Some(ExpandError::other(format!("invalid macro definition: {err}"))),
- }
+
+ let ExpandResult { value: tt, mut err } = match loc.def.kind {
+ MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
+ MacroDefKind::BuiltInDerive(expander, ..) => {
+ let arg = db.macro_arg_node(id).value.unwrap();
+
+ let node = SyntaxNode::new_root(arg);
+ let censor = censor_for_macro_input(&loc, &node);
+ let mut fixups = fixup::fixup_syntax(&node);
+ fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
+ let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
+ &node,
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
+
+ // this cast is a bit sus, can we avoid losing the typedness here?
+ let adt = ast::Adt::cast(node).unwrap();
+ let mut res = expander.expand(db, id, &adt, &tmap);
+ fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
+ res
+ }
+ _ => {
+ let ValueResult { value, err } = db.macro_arg(id);
+ let Some(macro_arg) = value else {
+ return ExpandResult {
+ value: Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: Vec::new(),
+ }),
+ // FIXME: We should make sure to enforce an invariant that invalid macro
+ // calls do not reach this call path!
+ err: Some(ExpandError::other("invalid token tree")),
+ };
+ };
+
+ let (arg, arg_tm, undo_info) = &*macro_arg;
+ let mut res = match loc.def.kind {
+ MacroDefKind::Declarative(id) => {
+ db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
+ }
+ MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+ // This might look a bit odd, but we do not expand the inputs to eager macros here.
+ // Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
+ // That kind of expansion uses the ast id map of an eager macro's input though, which goes through
+ // the HirFileId machinery. As eager macro inputs are assigned a macro file id, that query
+ // will end up going through here again, whereas we just want to inspect the raw input.
+ // As such we just return the input subtree here.
+ MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
+ let mut arg = arg.clone();
+ fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
+
+ return ExpandResult {
+ value: Arc::new(arg),
+ err: err.map(|err| {
+ let mut buf = String::new();
+ for err in &**err {
+ use std::fmt::Write;
+ _ = write!(buf, "{}, ", err);
+ }
+ buf.pop();
+ buf.pop();
+ ExpandError::other(buf)
+ }),
+ };
+ }
+ MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+ MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
+ _ => unreachable!(),
+ };
+ fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
+ res
}
};
- let Some(macro_arg) = db.macro_arg(id) else {
- return ExpandResult {
- value: Arc::new(
- tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: Vec::new(),
- },
- ),
- // FIXME: We should make sure to enforce a variant that invalid macro
- // calls do not reach this call path!
- err: Some(ExpandError::other(
- "invalid token tree"
- )),
- };
- };
- let (arg_tt, arg_tm, undo_info) = &*macro_arg;
- let ExpandResult { value: mut tt, mut err } = expander.expand(db, id, arg_tt);
if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
// FIXME: We should report both errors!
@@ -483,48 +615,29 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}
// Set a hard limit for the expanded tt
- let count = tt.count();
- if TOKEN_LIMIT.check(count).is_err() {
- return ExpandResult {
- value: Arc::new(tt::Subtree {
- delimiter: tt::Delimiter::UNSPECIFIED,
- token_trees: vec![],
- }),
- err: Some(ExpandError::other(format!(
- "macro invocation exceeds token limit: produced {} tokens, limit is {}",
- count,
- TOKEN_LIMIT.inner(),
- ))),
- };
+ if let Err(value) = check_tt_count(&tt) {
+ return value;
}
- fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
-
ExpandResult { value: Arc::new(tt), err }
}
-fn parse_macro_expansion_error(
- db: &dyn ExpandDatabase,
- macro_call_id: MacroCallId,
-) -> ExpandResult<Box<[SyntaxError]>> {
- db.parse_macro_expansion(MacroFile { macro_call_id })
- .map(|it| it.0.errors().to_vec().into_boxed_slice())
-}
-
-fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
+fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
- let Some(macro_arg) = db.macro_arg(id) else {
+ let Some(macro_arg) = db.macro_arg(id).value else {
return ExpandResult {
- value: tt::Subtree {
+ value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
token_trees: Vec::new(),
- },
- err: Some(ExpandError::other(
- "invalid token tree"
- )),
+ }),
+ // FIXME: We should make sure to enforce an invariant that invalid macro
+ // calls do not reach this call path!
+ err: Some(ExpandError::other("invalid token tree")),
};
};
+ let (arg_tt, arg_tm, undo_info) = &*macro_arg;
+
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
@@ -533,13 +646,23 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<t
let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
let mut attr_args = attr_args.0.clone();
- mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
+ mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
Some(attr_args)
}
_ => None,
};
- expander.expand(db, loc.def.krate, loc.krate, &macro_arg.0, attr_arg.as_ref())
+ let ExpandResult { value: mut tt, err } =
+ expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
+
+ // Set a hard limit for the expanded tt
+ if let Err(value) = check_tt_count(&tt) {
+ return value;
+ }
+
+ fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
+
+ ExpandResult { value: Arc::new(tt), err }
}
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
@@ -563,3 +686,22 @@ fn token_tree_to_syntax_node(
};
mbe::token_tree_to_syntax_node(tt, entry_point)
}
+
+fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
+ let count = tt.count();
+ if TOKEN_LIMIT.check(count).is_err() {
+ Err(ExpandResult {
+ value: Arc::new(tt::Subtree {
+ delimiter: tt::Delimiter::UNSPECIFIED,
+ token_trees: vec![],
+ }),
+ err: Some(ExpandError::other(format!(
+ "macro invocation exceeds token limit: produced {} tokens, limit is {}",
+ count,
+ TOKEN_LIMIT.inner(),
+ ))),
+ })
+ } else {
+ Ok(())
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index 7ee3fd375..4110f2847 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -19,7 +19,8 @@
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
-use syntax::{ted, Parse, SyntaxNode};
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use triomphe::Arc;
use crate::{
@@ -28,7 +29,7 @@ use crate::{
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
- MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
+ MacroCallLoc, MacroDefId, MacroDefKind,
};
pub fn expand_eager_macro_input(
@@ -37,20 +38,9 @@ pub fn expand_eager_macro_input(
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
- assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..)));
- let token_tree = macro_call.value.token_tree();
-
- let Some(token_tree) = token_tree else {
- return Ok(ExpandResult { value: None, err:
- Some(ExpandError::other(
- "invalid token tree"
- )),
- });
- };
- let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax());
-
+) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id);
+ // The expansion that the ast id map is built upon has no whitespace, so the offsets are wrong, as macro_call comes from the token tree that still has whitespace!
let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
let expand_to = ExpandTo::from_call_site(&macro_call.value);
@@ -61,47 +51,80 @@ pub fn expand_eager_macro_input(
let arg_id = db.intern_macro_call(MacroCallLoc {
def,
krate,
- eager: Some(Box::new(EagerCallInfo {
- arg: Arc::new((parsed_args, arg_token_map)),
- arg_id: None,
- error: None,
- })),
+ eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
});
- let arg_as_expr = match db.macro_arg_text(arg_id) {
- Some(it) => it,
- None => {
- return Ok(ExpandResult {
- value: None,
- err: Some(ExpandError::other("invalid token tree")),
- })
- }
+ let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
+ db.parse_macro_expansion(arg_id.as_macro_file());
+ // we need this map here, as the expansion of the eager input's fake file loses whitespace ...
+ let mut ws_mapping = FxHashMap::default();
+ if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
+ ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
+ Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
+ }));
+ }
+
+ let ExpandResult { value: expanded_eager_input, err } = {
+ eager_macro_recur(
+ db,
+ &Hygiene::new(db, macro_call.file_id),
+ InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
+ krate,
+ resolver,
+ )
};
- let ExpandResult { value: expanded_eager_input, err } = eager_macro_recur(
- db,
- &Hygiene::new(db, macro_call.file_id),
- InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)),
- krate,
- resolver,
- )?;
- let Some(expanded_eager_input) = expanded_eager_input else {
- return Ok(ExpandResult { value: None, err })
+ let err = parse_err.or(err);
+
+ let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
+ return ExpandResult { value: None, err };
+ };
+
+ let (mut subtree, expanded_eager_input_token_map) =
+ mbe::syntax_node_to_token_tree(&expanded_eager_input);
+
+ let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
+ let mut ids_used = FxHashSet::default();
+ let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
+ // The token map and ids of the subtree point into the expanded syntax node, but that is inaccessible from the outside,
+ // so we need to remap them to the original input of the eager macro.
+ subtree.visit_ids(&mut |id| {
+ // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
+
+ if let Some(range) = expanded_eager_input_token_map
+ .first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
+ {
+ // remap from expanded eager input to eager input expansion
+ if let Some(og_range) = mapping.get(&range) {
+ // remap from eager input expansion to original eager input
+ if let Some(&og_range) = ws_mapping.get(og_range) {
+ if let Some(og_token) = og_tmap.token_by_range(og_range) {
+ ids_used.insert(og_token);
+ return og_token;
+ }
+ }
+ }
+ }
+ tt::TokenId::UNSPECIFIED
+ });
+ og_tmap.filter(|id| ids_used.contains(&id));
+ og_tmap
+ } else {
+ Default::default()
};
- let (mut subtree, token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input);
subtree.delimiter = crate::tt::Delimiter::unspecified();
let loc = MacroCallLoc {
def,
krate,
eager: Some(Box::new(EagerCallInfo {
- arg: Arc::new((subtree, token_map)),
- arg_id: Some(arg_id),
+ arg: Arc::new((subtree, og_tmap)),
+ arg_id,
error: err.clone(),
})),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
};
- Ok(ExpandResult { value: Some(db.intern_macro_call(loc)), err })
+ ExpandResult { value: Some(db.intern_macro_call(loc)), err }
}
fn lazy_expand(
@@ -109,19 +132,16 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
-) -> ExpandResult<InFile<Parse<SyntaxNode>>> {
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
- let id = def.as_lazy_macro(
- db,
- krate,
- MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
- );
-
+ let ast_id = macro_call.with_value(ast_id);
+ let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
let macro_file = id.as_macro_file();
- db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0))
+ db.parse_macro_expansion(macro_file)
+ .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
}
fn eager_macro_recur(
@@ -130,19 +150,51 @@ fn eager_macro_recur(
curr: InFile<SyntaxNode>,
krate: CrateId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<SyntaxNode>>, UnresolvedMacro> {
+) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
let original = curr.value.clone_for_update();
+ let mut mapping = FxHashMap::default();
- let children = original.descendants().filter_map(ast::MacroCall::cast);
let mut replacements = Vec::new();
- // Note: We only report a single error inside of eager expansions
+ // FIXME: We only report a single error inside of eager expansions
let mut error = None;
+ let mut offset = 0i32;
+ let apply_offset = |it: TextSize, offset: i32| {
+ TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
+ };
+ let mut children = original.preorder_with_tokens();
// Collect replacement
- for child in children {
- let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
- Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
+ while let Some(child) = children.next() {
+ let WalkEvent::Enter(child) = child else { continue };
+ let call = match child {
+ syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
+ Some(it) => {
+ children.skip_subtree();
+ it
+ }
+ None => continue,
+ },
+ syntax::NodeOrToken::Token(t) => {
+ mapping.insert(
+ TextRange::new(
+ apply_offset(t.text_range().start(), offset),
+ apply_offset(t.text_range().end(), offset),
+ ),
+ t.text_range(),
+ );
+ continue;
+ }
+ };
+ let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+ Some(path) => match macro_resolver(path.clone()) {
+ Some(def) => def,
+ None => {
+ error =
+ Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
+ continue;
+ }
+ },
None => {
error = Some(ExpandError::other("malformed macro invocation"));
continue;
@@ -150,20 +202,32 @@ fn eager_macro_recur(
};
let ExpandResult { value, err } = match def.kind {
MacroDefKind::BuiltInEager(..) => {
- let ExpandResult { value, err } = match expand_eager_macro_input(
+ let ExpandResult { value, err } = expand_eager_macro_input(
db,
krate,
- curr.with_value(child.clone()),
+ curr.with_value(call.clone()),
def,
macro_resolver,
- ) {
- Ok(it) => it,
- Err(err) => return Err(err),
- };
+ );
match value {
- Some(call) => {
+ Some(call_id) => {
let ExpandResult { value, err: err2 } =
- db.parse_macro_expansion(call.as_macro_file());
+ db.parse_macro_expansion(call_id.as_macro_file());
+
+ if let Some(tt) = call.token_tree() {
+ let call_tt_start = tt.syntax().text_range().start();
+ let call_start =
+ apply_offset(call.syntax().text_range().start(), offset);
+ if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
+ mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+ value
+ .1
+ .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
+ .map(|r| (r + call_start, range + call_tt_start))
+ }));
+ }
+ }
+
ExpandResult {
value: Some(value.0.syntax_node().clone_for_update()),
err: err.or(err2),
@@ -177,36 +241,63 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInAttr(..)
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
- let ExpandResult { value, err } =
- lazy_expand(db, &def, curr.with_value(child.clone()), krate);
+ let ExpandResult { value: (parse, tm), err } =
+ lazy_expand(db, &def, curr.with_value(call.clone()), krate);
+ let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
+ Some(db.decl_macro_expander(def.krate, ast_id))
+ } else {
+ None
+ };
// replace macro inside
- let hygiene = Hygiene::new(db, value.file_id);
+ let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
&hygiene,
// FIXME: We discard parse errors here
- value.map(|it| it.syntax_node()),
+ parse.as_ref().map(|it| it.syntax_node()),
krate,
macro_resolver,
- )?;
+ );
let err = err.or(error);
- ExpandResult { value, err }
+
+ if let Some(tt) = call.token_tree() {
+ let call_tt_start = tt.syntax().text_range().start();
+ let call_start = apply_offset(call.syntax().text_range().start(), offset);
+ if let Some((_tt, arg_map, _)) = parse
+ .file_id
+ .macro_file()
+ .and_then(|id| db.macro_arg(id.macro_call_id).value)
+ .as_deref()
+ {
+ mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+ tm.first_range_by_token(
+ decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
+ syntax::SyntaxKind::TOMBSTONE,
+ )
+ .map(|r| (r + call_start, range + call_tt_start))
+ }));
+ }
+ }
+ // FIXME: Do we need to re-use _m here?
+ ExpandResult { value: value.map(|(n, _m)| n), err }
}
};
if err.is_some() {
error = err;
}
// check if the whole original syntax is replaced
- if child.syntax() == &original {
- return Ok(ExpandResult { value, err: error });
+ if call.syntax() == &original {
+ return ExpandResult { value: value.zip(Some(mapping)), err: error };
}
if let Some(insert) = value {
- replacements.push((child, insert));
+ offset += u32::from(insert.text_range().len()) as i32
+ - u32::from(call.syntax().text_range().len()) as i32;
+ replacements.push((call, insert));
}
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
- Ok(ExpandResult { value: Some(original), err: error })
+ ExpandResult { value: Some((original, mapping)), err: error }
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index 00796e7c0..e6e8d8c02 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -26,7 +26,7 @@ pub(crate) struct SyntaxFixups {
/// This is the information needed to reverse the fixups.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
- original: Vec<Subtree>,
+ original: Box<[Subtree]>,
}
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
@@ -272,7 +272,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
replace,
token_map,
next_id,
- undo_info: SyntaxFixupUndoInfo { original },
+ undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
}
}
@@ -472,13 +472,13 @@ fn foo () {match __ra_fixup {}}
check(
r#"
fn foo() {
- match x {
+ match it {
}
}
"#,
expect![[r#"
-fn foo () {match x {}}
+fn foo () {match it {}}
"#]],
)
}
@@ -547,11 +547,11 @@ fn foo () {a . __ra_fixup ; bar () ;}
check(
r#"
fn foo() {
- let x = a
+ let it = a
}
"#,
expect![[r#"
-fn foo () {let x = a ;}
+fn foo () {let it = a ;}
"#]],
)
}
@@ -561,11 +561,11 @@ fn foo () {let x = a ;}
check(
r#"
fn foo() {
- let x = a.
+ let it = a.
}
"#,
expect![[r#"
-fn foo () {let x = a . __ra_fixup ;}
+fn foo () {let it = a . __ra_fixup ;}
"#]],
)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index 10f8fe9ce..ade4a5928 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -126,7 +126,7 @@ struct HygieneInfo {
/// The start offset of the `macro_rules!` arguments or attribute input.
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
- macro_def: Arc<TokenExpander>,
+ macro_def: TokenExpander,
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
@@ -149,19 +149,15 @@ impl HygieneInfo {
token_id = unshifted;
(&attr_args.1, self.attr_input_or_mac_def_start?)
}
- None => (
- &self.macro_arg.1,
- InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
- ),
+ None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
},
_ => match origin {
- mbe::Origin::Call => (
- &self.macro_arg.1,
- InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
- ),
- mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) {
- (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
- (def_site_token_map, *tt)
+ mbe::Origin::Call => {
+ (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
+ }
+ mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
+ (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
+ (&expander.def_site_token_map, *tt)
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
@@ -177,7 +173,7 @@ fn make_hygiene_info(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
loc: &MacroCallLoc,
-) -> Option<HygieneInfo> {
+) -> HygieneInfo {
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
@@ -198,9 +194,9 @@ fn make_hygiene_info(
_ => None,
});
- let macro_def = db.macro_def(loc.def).ok()?;
+ let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
- let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+ let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
@@ -208,7 +204,7 @@ fn make_hygiene_info(
))
});
- Some(HygieneInfo {
+ HygieneInfo {
file: macro_file,
attr_input_or_mac_def_start: attr_input_or_mac_def
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
@@ -216,7 +212,7 @@ fn make_hygiene_info(
macro_arg,
macro_def,
exp_map,
- })
+ }
}
impl HygieneFrame {
@@ -225,8 +221,7 @@ impl HygieneFrame {
None => (None, None, false),
Some(macro_file) => {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let info =
- make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info));
+ let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
match loc.def.kind {
MacroDefKind::Declarative(_) => {
(info, Some(loc.def.krate), loc.def.local_inner)
@@ -240,17 +235,14 @@ impl HygieneFrame {
}
};
- let (calling_file, info) = match info {
- None => {
- return HygieneFrame {
- expansion: None,
- local_inner,
- krate,
- call_site: None,
- def_site: None,
- };
+ let Some((info, calling_file)) = info else {
+ return HygieneFrame {
+ expansion: None,
+ local_inner,
+ krate,
+ call_site: None,
+ def_site: None,
}
- Some(it) => it,
};
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index e0c199328..1f1e20f49 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -37,11 +37,11 @@ use either::Either;
use syntax::{
algo::{self, skip_trivia_token},
ast::{self, AstNode, HasDocComments},
- Direction, SyntaxNode, SyntaxToken,
+ AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken,
};
use crate::{
- ast_id_map::FileAstId,
+ ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId},
attrs::AttrId,
builtin_attr_macro::BuiltinAttrExpander,
builtin_derive_macro::BuiltinDeriveExpander,
@@ -127,7 +127,8 @@ impl_intern_key!(MacroCallId);
pub struct MacroCallLoc {
pub def: MacroDefId,
pub(crate) krate: CrateId,
- /// Some if `def` is a builtin eager macro.
+ /// Some if this is a macro call for an eager macro. Note that this is `None`
+ /// for the eager input macro file.
eager: Option<Box<EagerCallInfo>>,
pub kind: MacroCallKind,
}
@@ -152,11 +153,10 @@ pub enum MacroDefKind {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct EagerCallInfo {
- /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
+ /// The expanded argument of the eager macro.
arg: Arc<(tt::Subtree, TokenMap)>,
- /// call id of the eager macro's input file. If this is none, macro call containing this call info
- /// is an eager macro's input, otherwise it is its output.
- arg_id: Option<MacroCallId>,
+ /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
+ arg_id: MacroCallId,
error: Option<ExpandError>,
}
@@ -221,11 +221,7 @@ impl HirFileId {
HirFileIdRepr::FileId(id) => break id,
HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
- let is_include_expansion = loc.def.is_include()
- && matches!(
- loc.eager.as_deref(),
- Some(EagerCallInfo { arg_id: Some(_), .. })
- );
+ let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
Some(Ok((_, file))) => file.into(),
_ => loc.kind.file_id(),
@@ -270,57 +266,13 @@ impl HirFileId {
/// Return expansion information if it is a macro-expansion file
pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
let macro_file = self.macro_file()?;
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-
- let arg_tt = loc.kind.arg(db)?;
-
- let macro_def = db.macro_def(loc.def).ok()?;
- let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
- let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
- Arc::new((
- tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
- Default::default(),
- Default::default(),
- ))
- });
-
- let def = loc.def.ast_id().left().and_then(|id| {
- let def_tt = match id.to_node(db) {
- ast::Macro::MacroRules(mac) => mac.token_tree()?,
- ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => {
- return None
- }
- ast::Macro::MacroDef(mac) => mac.body()?,
- };
- Some(InFile::new(id.file_id, def_tt))
- });
- let attr_input_or_mac_def = def.or_else(|| match loc.kind {
- MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
- // FIXME: handle `cfg_attr`
- let tt = ast_id
- .to_node(db)
- .doc_comments_and_attrs()
- .nth(invoc_attr_index.ast_index())
- .and_then(Either::left)?
- .token_tree()?;
- Some(InFile::new(ast_id.file_id, tt))
- }
- _ => None,
- });
-
- Some(ExpansionInfo {
- expanded: InFile::new(self, parse.syntax_node()),
- arg: InFile::new(loc.kind.file_id(), arg_tt),
- attr_input_or_mac_def,
- macro_arg_shift: mbe::Shift::new(&macro_arg.0),
- macro_arg,
- macro_def,
- exp_map,
- })
+ ExpansionInfo::new(db, macro_file)
}
- /// Indicate it is macro file generated for builtin derive
- pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn as_builtin_derive_attr_node(
+ &self,
+ db: &dyn db::ExpandDatabase,
+ ) -> Option<InFile<ast::Attr>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let attr = match loc.def.kind {
@@ -333,8 +285,22 @@ impl HirFileId {
pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _))
+ matches!(
+ db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
+ MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ )
+ }
+ None => false,
+ }
+ }
+
+ pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
+ match self.macro_file() {
+ Some(macro_file) => {
+ matches!(
+ db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
+ MacroDefKind::BuiltInDerive(..)
+ )
}
None => false,
}
@@ -344,8 +310,7 @@ impl HirFileId {
pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
- let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- loc.def.is_include()
+ db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
}
_ => false,
}
@@ -355,7 +320,7 @@ impl HirFileId {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. }))
+ matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
_ => false,
}
@@ -450,6 +415,24 @@ impl MacroDefId {
)
}
+ pub fn is_derive(&self) -> bool {
+ matches!(
+ self.kind,
+ MacroDefKind::BuiltInDerive(..)
+ | MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
+ )
+ }
+
+ pub fn is_fn_like(&self) -> bool {
+ matches!(
+ self.kind,
+ MacroDefKind::BuiltIn(..)
+ | MacroDefKind::ProcMacro(_, ProcMacroKind::FuncLike, _)
+ | MacroDefKind::BuiltInEager(..)
+ | MacroDefKind::Declarative(..)
+ )
+ }
+
pub fn is_attribute_derive(&self) -> bool {
matches!(self.kind, MacroDefKind::BuiltInAttr(expander, ..) if expander.is_derive())
}
@@ -536,9 +519,9 @@ impl MacroCallKind {
};
let range = match kind {
- MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
- MacroCallKind::Derive { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
- MacroCallKind::Attr { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::Derive { ast_id, .. } => ast_id.to_ptr(db).text_range(),
+ MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).text_range(),
};
FileRange { range, file_id }
@@ -588,13 +571,18 @@ impl MacroCallKind {
FileRange { range, file_id }
}
- fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<SyntaxNode> {
+ fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
match self {
- MacroCallKind::FnLike { ast_id, .. } => {
- Some(ast_id.to_node(db).token_tree()?.syntax().clone())
+ MacroCallKind::FnLike { ast_id, .. } => ast_id
+ .to_in_file_node(db)
+ .map(|it| Some(it.token_tree()?.syntax().clone()))
+ .transpose(),
+ MacroCallKind::Derive { ast_id, .. } => {
+ Some(ast_id.to_in_file_node(db).syntax().cloned())
+ }
+ MacroCallKind::Attr { ast_id, .. } => {
+ Some(ast_id.to_in_file_node(db).syntax().cloned())
}
- MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
- MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
}
}
}
@@ -612,13 +600,13 @@ impl MacroCallId {
/// ExpansionInfo mainly describes how to map text range between src and expanded macro
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
- expanded: InFile<SyntaxNode>,
+ expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes
arg: InFile<SyntaxNode>,
/// The `macro_rules!` or attribute input.
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
- macro_def: Arc<TokenExpander>,
+ macro_def: TokenExpander,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
/// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
/// and as such we need to shift tokens if they are part of an attributes input instead of their item.
@@ -628,7 +616,7 @@ pub struct ExpansionInfo {
impl ExpansionInfo {
pub fn expanded(&self) -> InFile<SyntaxNode> {
- self.expanded.clone()
+ self.expanded.clone().into()
}
pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
@@ -659,7 +647,7 @@ impl ExpansionInfo {
let token_id_in_attr_input = if let Some(item) = item {
// check if we are mapping down in an attribute input
// this is a special case as attributes can have two inputs
- let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let call_id = self.expanded.file_id.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);
let token_range = token.value.text_range();
@@ -705,7 +693,7 @@ impl ExpansionInfo {
let relative_range =
token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
let token_id = self.macro_arg.1.token_by_range(relative_range)?;
- // conditionally shift the id by a declaratives macro definition
+ // conditionally shift the id by a declarative macro definition
self.macro_def.map_id_down(token_id)
}
};
@@ -715,7 +703,7 @@ impl ExpansionInfo {
.ranges_by_token(token_id, token.value.kind())
.flat_map(move |range| self.expanded.value.covering_element(range).into_token());
- Some(tokens.map(move |token| self.expanded.with_value(token)))
+ Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
}
/// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
@@ -724,18 +712,17 @@ impl ExpansionInfo {
db: &dyn db::ExpandDatabase,
token: InFile<&SyntaxToken>,
) -> Option<(InFile<SyntaxToken>, Origin)> {
+ assert_eq!(token.file_id, self.expanded.file_id.into());
// Fetch the id through its text range,
let token_id = self.exp_map.token_by_range(token.value.text_range())?;
// conditionally unshifting the id to accommodate for macro-rules def site
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
- let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let call_id = self.expanded.file_id.macro_call_id;
let loc = db.lookup_intern_macro_call(call_id);
// Special case: map tokens from `include!` expansions to the included file
- if loc.def.is_include()
- && matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. }))
- {
+ if loc.def.is_include() {
if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
let source = db.parse(file_id);
@@ -765,9 +752,9 @@ impl ExpansionInfo {
}
_ => match origin {
mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
- mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
- (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
- (def_site_token_map, tt.syntax().cloned())
+ mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) {
+ (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
+ (&expander.def_site_token_map, tt.syntax().cloned())
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
@@ -779,6 +766,58 @@ impl ExpansionInfo {
tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
Some((tt.with_value(token), origin))
}
+
+ fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+ let arg_tt = loc.kind.arg(db)?;
+
+ let macro_def = db.macro_expander(loc.def);
+ let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
+ let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
+
+ let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+ Arc::new((
+ tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+ Default::default(),
+ Default::default(),
+ ))
+ });
+
+ let def = loc.def.ast_id().left().and_then(|id| {
+ let def_tt = match id.to_node(db) {
+ ast::Macro::MacroRules(mac) => mac.token_tree()?,
+ ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
+ return None
+ }
+ ast::Macro::MacroDef(mac) => mac.body()?,
+ };
+ Some(InFile::new(id.file_id, def_tt))
+ });
+ let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ let tt = ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index.ast_index())
+ .and_then(Either::left)?
+ .token_tree()?;
+ Some(InFile::new(ast_id.file_id, tt))
+ }
+ _ => None,
+ });
+
+ Some(ExpansionInfo {
+ expanded,
+ arg: arg_tt,
+ attr_input_or_mac_def,
+ macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+ macro_arg,
+ macro_def,
+ exp_map,
+ })
+ }
}
/// `AstId` points to an AST node in any file.
@@ -786,10 +825,26 @@ impl ExpansionInfo {
/// It is stable across reparses, and can be used as salsa key/value.
pub type AstId<N> = InFile<FileAstId<N>>;
-impl<N: AstNode> AstId<N> {
+impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
- let root = db.parse_or_expand(self.file_id);
- db.ast_id_map(self.file_id).get(self.value).to_node(&root)
+ self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+ }
+ pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> {
+ InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
+ }
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
+ db.ast_id_map(self.file_id).get(self.value)
+ }
+}
+
+pub type ErasedAstId = InFile<ErasedFileAstId>;
+
+impl ErasedAstId {
+ pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
+ self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
+ }
+ pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
+ db.ast_id_map(self.file_id).get_raw(self.value)
}
}
@@ -850,7 +905,7 @@ impl<L, R> InFile<Either<L, R>> {
}
}
-impl<'a> InFile<&'a SyntaxNode> {
+impl InFile<&SyntaxNode> {
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
@@ -1011,6 +1066,18 @@ impl InFile<SyntaxToken> {
}
}
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InMacroFile<T> {
+ pub file_id: MacroFile,
+ pub value: T,
+}
+
+impl<T> From<InMacroFile<T>> for InFile<T> {
+ fn from(macro_file: InMacroFile<T>) -> Self {
+ InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
+ }
+}
+
fn ascend_node_border_tokens(
db: &dyn db::ExpandDatabase,
InFile { file_id, value: node }: InFile<&SyntaxNode>,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index 47a8ab7de..69aa09c4a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -126,7 +126,7 @@ struct Display<'a> {
path: &'a ModPath,
}
-impl<'a> fmt::Display for Display<'a> {
+impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
display_fmt_path(self.db, self.path, f, true)
}
@@ -137,7 +137,7 @@ struct UnescapedDisplay<'a> {
path: &'a UnescapedModPath<'a>,
}
-impl<'a> fmt::Display for UnescapedDisplay<'a> {
+impl fmt::Display for UnescapedDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
display_fmt_path(self.db, self.path.0, f, false)
}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
index f8dbb8427..7c179c0cf 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -24,7 +24,7 @@ enum Repr {
TupleField(usize),
}
-impl<'a> UnescapedName<'a> {
+impl UnescapedName<'_> {
/// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over
/// [`ToString::to_string`] if possible as this conversion is cheaper in the general case.
pub fn to_smol_str(&self) -> SmolStr {
@@ -40,7 +40,7 @@ impl<'a> UnescapedName<'a> {
}
}
- pub fn display(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
+ pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ {
_ = db;
UnescapedDisplay { name: self }
}
@@ -96,6 +96,15 @@ impl Name {
Name::new_inline("[missing name]")
}
+ /// Returns true if this is a fake name for things missing in the source code. See
+ /// [`missing()`][Self::missing] for details.
+ ///
+ /// Use this method instead of comparing with `Self::missing()`, as missing names
+ /// should (ideally) have `gensym` semantics.
+ pub fn is_missing(&self) -> bool {
+ self == &Name::missing()
+ }
+
/// Generates a new name which is only equal to itself, by incrementing a counter. Due
/// its implementation, it should not be used in things that salsa considers, like
/// type names or field names, and it should be only used in names of local variables
@@ -162,7 +171,7 @@ struct Display<'a> {
name: &'a Name,
}
-impl<'a> fmt::Display for Display<'a> {
+impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.name.0 {
Repr::Text(text) => fmt::Display::fmt(&text, f),
@@ -175,7 +184,7 @@ struct UnescapedDisplay<'a> {
name: &'a UnescapedName<'a>,
}
-impl<'a> fmt::Display for UnescapedDisplay<'a> {
+impl fmt::Display for UnescapedDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self.name.0 .0 {
Repr::Text(text) => {
@@ -282,8 +291,10 @@ pub mod known {
alloc,
iter,
ops,
+ fmt,
future,
result,
+ string,
boxed,
option,
prelude,
@@ -311,6 +322,7 @@ pub mod known {
RangeToInclusive,
RangeTo,
Range,
+ String,
Neg,
Not,
None,
@@ -321,6 +333,7 @@ pub mod known {
iter_mut,
len,
is_empty,
+ as_str,
new,
// Builtin macros
asm,
@@ -334,6 +347,7 @@ pub mod known {
core_panic,
env,
file,
+ format,
format_args_nl,
format_args,
global_asm,
@@ -365,6 +379,7 @@ pub mod known {
cfg_eval,
crate_type,
derive,
+ derive_const,
global_allocator,
no_core,
no_std,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index c8bea3450..abc19d63a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -19,14 +19,15 @@ bitflags = "2.1.0"
smallvec.workspace = true
ena = "0.14.0"
either = "1.7.0"
+oorandom = "11.1.3"
tracing = "0.1.35"
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
-chalk-solve = { version = "0.91.0", default-features = false }
-chalk-ir = "0.91.0"
-chalk-recursive = { version = "0.91.0", default-features = false }
-chalk-derive = "0.91.0"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+chalk-solve = { version = "0.92.0", default-features = false }
+chalk-ir = "0.92.0"
+chalk-recursive = { version = "0.92.0", default-features = false }
+chalk-derive = "0.92.0"
+la-arena.workspace = true
once_cell = "1.17.0"
triomphe.workspace = true
nohash-hasher.workspace = true
@@ -47,7 +48,6 @@ limit.workspace = true
expect-test = "1.4.0"
tracing = "0.1.35"
tracing-subscriber = { version = "0.3.16", default-features = false, features = [
- "env-filter",
"registry",
] }
tracing-tree = "0.2.1"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
index 3860bccec..4625a3b01 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
@@ -36,7 +36,7 @@ pub fn autoderef(
) -> impl Iterator<Item = Ty> {
let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty);
- let mut autoderef = Autoderef::new(&mut table, ty);
+ let mut autoderef = Autoderef::new(&mut table, ty, false);
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
// `ty` may contain unresolved inference variables. Since there's no chance they would be
@@ -63,12 +63,13 @@ pub(crate) struct Autoderef<'a, 'db> {
ty: Ty,
at_start: bool,
steps: Vec<(AutoderefKind, Ty)>,
+ explicit: bool,
}
impl<'a, 'db> Autoderef<'a, 'db> {
- pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self {
+ pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self {
let ty = table.resolve_ty_shallow(&ty);
- Autoderef { table, ty, at_start: true, steps: Vec::new() }
+ Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit }
}
pub(crate) fn step_count(&self) -> usize {
@@ -97,7 +98,7 @@ impl Iterator for Autoderef<'_, '_> {
return None;
}
- let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?;
+ let (kind, new_ty) = autoderef_step(self.table, self.ty.clone(), self.explicit)?;
self.steps.push((kind, self.ty.clone()));
self.ty = new_ty;
@@ -109,8 +110,9 @@ impl Iterator for Autoderef<'_, '_> {
pub(crate) fn autoderef_step(
table: &mut InferenceTable<'_>,
ty: Ty,
+ explicit: bool,
) -> Option<(AutoderefKind, Ty)> {
- if let Some(derefed) = builtin_deref(table, &ty, false) {
+ if let Some(derefed) = builtin_deref(table, &ty, explicit) {
Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed)))
} else {
Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?))
@@ -124,7 +126,6 @@ pub(crate) fn builtin_deref<'ty>(
) -> Option<&'ty Ty> {
match ty.kind(Interner) {
TyKind::Ref(.., ty) => Some(ty),
- // FIXME: Maybe accept this but diagnose if its not explicit?
TyKind::Raw(.., ty) if explicit => Some(ty),
&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => {
if crate::lang_items::is_box(table.db, adt) {
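The `explicit` flag threads from `Autoderef::new` down into `builtin_deref`, so raw pointers are only dereferenced when the deref is written out explicitly; the implicit autoderef used for method and field resolution still stops at raw pointers. A minimal standalone sketch (plain Rust, illustrative only) of the language behaviour being modelled:

    struct S { field: i32 }

    fn main() {
        let s = S { field: 7 };
        let p: *const S = &s;
        // An explicit deref of a raw pointer is allowed (in unsafe code)...
        let v = unsafe { (*p).field };
        // ...but implicit autoderef never looks through raw pointers:
        // `p.field` would not compile, so the deref chain stops at `p`.
        assert_eq!(v, 7);
    }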
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
index 5dd8e2719..f4fbace19 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -5,13 +5,13 @@ use std::{iter, sync::Arc};
use tracing::debug;
-use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds};
+use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds};
use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
use base_db::CrateId;
use hir_def::{
hir::Movability,
- lang_item::{lang_attr, LangItem, LangItemTarget},
+ lang_item::{LangItem, LangItemTarget},
AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId,
};
use hir_expand::name::name;
@@ -46,7 +46,7 @@ pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Inte
pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
pub(crate) type Variances = chalk_ir::Variances<Interner>;
-impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
+impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
self.db.associated_ty_data(id)
}
@@ -60,9 +60,37 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
// FIXME: keep track of these
Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
}
- fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
- // FIXME: keep track of this
- chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner)
+ fn discriminant_type(&self, ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
+ if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) {
+ if let hir_def::AdtId::EnumId(e) = id.0 {
+ let enum_data = self.db.enum_data(e);
+ let ty = enum_data.repr.unwrap_or_default().discr_type();
+ return chalk_ir::TyKind::Scalar(match ty {
+ hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed {
+ true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize),
+ false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize),
+ },
+ hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed {
+ true => chalk_ir::Scalar::Int(match size {
+ hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8,
+ hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16,
+ hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32,
+ hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64,
+ hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128,
+ }),
+ false => chalk_ir::Scalar::Uint(match size {
+ hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8,
+ hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16,
+ hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32,
+ hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64,
+ hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128,
+ }),
+ },
+ })
+ .intern(Interner);
+ }
+ }
+ chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner)
}
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
self.db.impl_datum(self.krate, impl_id)
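`discriminant_type` now derives the scalar type from the enum's `repr`, falling back to `u8` rather than the previous hard-coded `u32`. A standalone sketch of the repr behaviour this mirrors:

    #[repr(u16)]
    enum Small { A, B }

    #[repr(i64)]
    enum Wide { X = -1, Y = 2 }

    fn main() {
        // With an explicit repr, the discriminant is stored in that integer
        // type, which is what the chalk hook above now reports.
        assert_eq!(std::mem::size_of::<Small>(), 2);
        assert_eq!(std::mem::size_of::<Wide>(), 8);
    }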
@@ -565,7 +593,7 @@ pub(crate) fn trait_datum_query(
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
- let well_known = lang_attr(db.upcast(), trait_).and_then(well_known_trait_from_lang_item);
+ let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item);
let trait_datum = TraitDatum {
id: trait_id,
binders: make_binders(db, &generic_params, trait_datum_bound),
@@ -593,6 +621,7 @@ fn well_known_trait_from_lang_item(item: LangItem) -> Option<WellKnownTrait> {
LangItem::Unsize => WellKnownTrait::Unsize,
LangItem::Tuple => WellKnownTrait::Tuple,
LangItem::PointeeTrait => WellKnownTrait::Pointee,
+ LangItem::FnPtrTrait => WellKnownTrait::FnPtr,
_ => return None,
})
}
@@ -614,6 +643,7 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem {
WellKnownTrait::Unpin => LangItem::Unpin,
WellKnownTrait::Unsize => LangItem::Unsize,
WellKnownTrait::Pointee => LangItem::PointeeTrait,
+ WellKnownTrait::FnPtr => LangItem::FnPtrTrait,
}
}
@@ -844,28 +874,34 @@ pub(super) fn generic_predicate_to_inline_bound(
}
let args_no_self = trait_ref.substitution.as_slice(Interner)[1..]
.iter()
- .map(|ty| ty.clone().cast(Interner))
+ .cloned()
+ .casted(Interner)
.collect();
let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self };
Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
}
WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
- let trait_ = projection_ty.trait_(db);
- if projection_ty.self_type_parameter(db) != self_ty_shifted_in {
+ let generics =
+ generics(db.upcast(), from_assoc_type_id(projection_ty.associated_ty_id).into());
+ let (assoc_args, trait_args) =
+ projection_ty.substitution.as_slice(Interner).split_at(generics.len_self());
+ let (self_ty, args_no_self) =
+ trait_args.split_first().expect("projection without trait self type");
+ if self_ty.assert_ty_ref(Interner) != &self_ty_shifted_in {
return None;
}
- let args_no_self = projection_ty.substitution.as_slice(Interner)[1..]
- .iter()
- .map(|ty| ty.clone().cast(Interner))
- .collect();
+
+ let args_no_self = args_no_self.iter().cloned().casted(Interner).collect();
+ let parameters = assoc_args.to_vec();
+
let alias_eq_bound = rust_ir::AliasEqBound {
value: ty.clone(),
trait_bound: rust_ir::TraitBound {
- trait_id: to_chalk_trait_id(trait_),
+ trait_id: to_chalk_trait_id(projection_ty.trait_(db)),
args_no_self,
},
associated_ty_id: projection_ty.associated_ty_id,
- parameters: Vec::new(), // FIXME we don't support generic associated types yet
+ parameters,
};
Some(chalk_ir::Binders::new(
binders,
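Because the associated-type arguments are no longer discarded (the old `parameters: Vec::new()` FIXME), alias-equality bounds on generic associated types can now be lowered with their own arguments. A minimal plain-Rust sketch of the bound shape involved; the trait and names are illustrative only:

    trait Lending {
        type Item<'a>
        where
            Self: 'a;
        fn get(&self) -> Self::Item<'_>;
    }

    struct Words(String);

    impl Lending for Words {
        type Item<'a> = &'a str;
        fn get(&self) -> &str {
            &self.0
        }
    }

    // The where-clause equates a generic associated type; the GAT's own
    // argument (the lifetime) is what the lowering now keeps as `parameters`.
    fn print_item<T>(t: &T)
    where
        for<'a> T: Lending<Item<'a> = &'a str>,
    {
        println!("{}", t.get());
    }

    fn main() {
        print_item(&Words("hello".to_owned()));
    }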
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
index a8071591a..c0b243ea2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -343,7 +343,8 @@ impl TyExt for Ty {
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
let crate_id = owner.module(db.upcast()).krate();
- let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else {
+ let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait())
+ else {
return false;
};
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index 262341c6e..1c0f7b08d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -16,7 +16,8 @@ use triomphe::Arc;
use crate::{
db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode,
mir::monomorphize_mir_body_bad, to_placeholder_idx, utils::Generics, Const, ConstData,
- ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, Ty, TyBuilder,
+ ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty,
+ TyBuilder,
};
use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
@@ -88,7 +89,7 @@ pub(crate) fn path_to_const(
ConstValue::Placeholder(to_placeholder_idx(db, p.into()))
}
ParamLoweringMode::Variable => match args.param_idx(p.into()) {
- Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
+ Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)),
None => {
never!(
"Generic list doesn't contain this param: {:?}, {:?}, {:?}",
@@ -135,15 +136,15 @@ pub fn intern_const_ref(
ty: Ty,
krate: CrateId,
) -> Const {
- let layout = db.layout_of_ty(ty.clone(), krate);
+ let layout = db.layout_of_ty(ty.clone(), Arc::new(TraitEnvironment::empty(krate)));
let bytes = match value {
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
- let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
+ let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
LiteralConstRef::UInt(i) => {
- let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16);
+ let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
}
LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
@@ -171,9 +172,9 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
chalk_ir::ConstValue::InferenceVar(_) => None,
chalk_ir::ConstValue::Placeholder(_) => None,
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
- ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
+ ConstScalar::Bytes(it, _) => Some(u128::from_le_bytes(pad16(&it, false))),
ConstScalar::UnevaluatedConst(c, subst) => {
- let ec = db.const_eval(*c, subst.clone()).ok()?;
+ let ec = db.const_eval(*c, subst.clone(), None).ok()?;
try_const_usize(db, &ec)
}
_ => None,
@@ -186,6 +187,7 @@ pub(crate) fn const_eval_recover(
_: &[String],
_: &GeneralConstId,
_: &Substitution,
+ _: &Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError> {
Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
@@ -210,6 +212,7 @@ pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
def: GeneralConstId,
subst: Substitution,
+ trait_env: Option<Arc<TraitEnvironment>>,
) -> Result<Const, ConstEvalError> {
let body = match def {
GeneralConstId::ConstId(c) => {
@@ -228,7 +231,7 @@ pub(crate) fn const_eval_query(
}
GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?,
};
- let c = interpret_mir(db, &body, false).0?;
+ let c = interpret_mir(db, body, false, trait_env).0?;
Ok(c)
}
@@ -241,7 +244,7 @@ pub(crate) fn const_eval_static_query(
Substitution::empty(Interner),
db.trait_environment_for_body(def.into()),
)?;
- let c = interpret_mir(db, &body, false).0?;
+ let c = interpret_mir(db, body, false, None).0?;
Ok(c)
}
@@ -268,7 +271,7 @@ pub(crate) fn const_eval_discriminant_variant(
Substitution::empty(Interner),
db.trait_environment_for_body(def),
)?;
- let c = interpret_mir(db, &mir_body, false).0?;
+ let c = interpret_mir(db, mir_body, false, None).0?;
let c = try_const_usize(db, &c).unwrap() as i128;
Ok(c)
}
@@ -293,7 +296,7 @@ pub(crate) fn eval_to_const(
}
let infer = ctx.clone().resolve_all();
if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
- if let Ok(result) = interpret_mir(db, &mir_body, true).0 {
+ if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true, None).0 {
return result;
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 0db1fefbf..666955fa1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -1,10 +1,11 @@
use base_db::{fixture::WithFixture, FileId};
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
+use test_utils::skip_slow_tests;
use crate::{
consteval::try_const_usize, db::HirDatabase, mir::pad16, test_db::TestDB, Const, ConstScalar,
- Interner,
+ Interner, MemoryMap,
};
use super::{
@@ -16,7 +17,7 @@ mod intrinsics;
fn simplify(e: ConstEvalError) -> ConstEvalError {
match e {
- ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e, _, _)) => {
+ ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => {
simplify(ConstEvalError::MirEvalError(*e))
}
_ => e,
@@ -36,7 +37,37 @@ fn check_fail(ra_fixture: &str, error: impl FnOnce(ConstEvalError) -> bool) {
#[track_caller]
fn check_number(ra_fixture: &str, answer: i128) {
- let (db, file_id) = TestDB::with_single_file(ra_fixture);
+ check_answer(ra_fixture, |b, _| {
+ assert_eq!(
+ b,
+ &answer.to_le_bytes()[0..b.len()],
+ "Bytes differ. In decimal form: actual = {}, expected = {answer}",
+ i128::from_le_bytes(pad16(b, true))
+ );
+ });
+}
+
+#[track_caller]
+fn check_str(ra_fixture: &str, answer: &str) {
+ check_answer(ra_fixture, |b, mm| {
+ let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
+ let size = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
+ let Some(bytes) = mm.get(addr, size) else {
+ panic!("string data missed in the memory map");
+ };
+ assert_eq!(
+ bytes,
+ answer.as_bytes(),
+ "Bytes differ. In string form: actual = {}, expected = {answer}",
+ String::from_utf8_lossy(bytes)
+ );
+ });
+}
+
+#[track_caller]
+fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
+ let (db, file_ids) = TestDB::with_many_files(ra_fixture);
+ let file_id = *file_ids.last().unwrap();
let r = match eval_goal(&db, file_id) {
Ok(t) => t,
Err(e) => {
@@ -46,13 +77,8 @@ fn check_number(ra_fixture: &str, answer: i128) {
};
match &r.data(Interner).value {
chalk_ir::ConstValue::Concrete(c) => match &c.interned {
- ConstScalar::Bytes(b, _) => {
- assert_eq!(
- b,
- &answer.to_le_bytes()[0..b.len()],
- "Bytes differ. In decimal form: actual = {}, expected = {answer}",
- i128::from_le_bytes(pad16(b, true))
- );
+ ConstScalar::Bytes(b, mm) => {
+ check(b, mm);
}
x => panic!("Expected number but found {:?}", x),
},
@@ -87,8 +113,8 @@ fn eval_goal(db: &TestDB, file_id: FileId) -> Result<Const, ConstEvalError> {
}
_ => None,
})
- .unwrap();
- db.const_eval(const_id.into(), Substitution::empty(Interner))
+ .expect("No const named GOAL found in the test");
+ db.const_eval(const_id.into(), Substitution::empty(Interner), None)
}
#[test]
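`check_str` relies on a `&str` constant evaluating to a fat pointer: the first half of the byte buffer is the address, the second half the length, and the character data itself sits behind that address in the memory map. A standalone illustration of that layout:

    fn main() {
        // A &str is (pointer, length): two usizes, which is why `check_str`
        // splits the evaluated byte buffer in half before looking the address
        // up in the memory map.
        assert_eq!(
            std::mem::size_of::<&str>(),
            2 * std::mem::size_of::<usize>()
        );
        let s: &str = "hello";
        assert_eq!(s.len(), 5);
    }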
@@ -108,6 +134,7 @@ fn bit_op() {
check_fail(r#"const GOAL: i8 = 1 << 8"#, |e| {
e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_string()))
});
+ check_number(r#"const GOAL: i32 = 100000000i32 << 11"#, (100000000i32 << 11) as i128);
}
#[test]
@@ -166,14 +193,21 @@ fn casts() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
+ struct X {
+ unsize_field: [u8],
+ }
+
const GOAL: usize = {
let a = [10, 20, 3, 15];
let x: &[i32] = &a;
- let y: *const [i32] = x;
- let z = y as *const [u8]; // slice fat pointer cast don't touch metadata
- let q = z as *const str;
- let p = q as *const [u8];
- let w = unsafe { &*z };
+ let x: *const [i32] = x;
+ let x = x as *const [u8]; // slice fat pointer casts don't touch metadata
+ let x = x as *const str;
+ let x = x as *const X;
+ let x = x as *const [i16];
+ let x = x as *const X;
+ let x = x as *const [u8];
+ let w = unsafe { &*x };
w.len()
};
"#,
@@ -199,6 +233,30 @@ fn raw_pointer_equality() {
}
#[test]
+fn alignment() {
+ check_answer(
+ r#"
+//- minicore: transmute
+use core::mem::transmute;
+const GOAL: usize = {
+ let x: i64 = 2;
+ transmute(&x)
+}
+ "#,
+ |b, _| assert_eq!(b[0] % 8, 0),
+ );
+ check_answer(
+ r#"
+//- minicore: transmute
+use core::mem::transmute;
+static X: i64 = 12;
+const GOAL: usize = transmute(&X);
+ "#,
+ |b, _| assert_eq!(b[0] % 8, 0),
+ );
+}
+
+#[test]
fn locals() {
check_number(
r#"
@@ -1129,6 +1187,25 @@ fn pattern_matching_ergonomics() {
}
#[test]
+fn destructing_assignment() {
+ check_number(
+ r#"
+ //- minicore: add
+ const fn f(i: &mut u8) -> &mut u8 {
+ *i += 1;
+ i
+ }
+ const GOAL: u8 = {
+ let mut i = 4;
+ _ = f(&mut i);
+ i
+ };
+ "#,
+ 5,
+ );
+}
+
+#[test]
fn let_else() {
check_number(
r#"
@@ -1370,14 +1447,14 @@ fn builtin_derive_macro() {
#[derive(Clone)]
struct Y {
field1: i32,
- field2: u8,
+ field2: ((i32, u8), i64),
}
const GOAL: u8 = {
- let x = X(2, Z::Foo(Y { field1: 4, field2: 5 }), 8);
+ let x = X(2, Z::Foo(Y { field1: 4, field2: ((32, 5), 12) }), 8);
let x = x.clone();
let Z::Foo(t) = x.1;
- t.field2
+ t.field2.0 .1
};
"#,
5,
@@ -1551,6 +1628,58 @@ fn closures() {
}
#[test]
+fn manual_fn_trait_impl() {
+ check_number(
+ r#"
+//- minicore: fn, copy
+struct S(i32);
+
+impl FnOnce<(i32, i32)> for S {
+ type Output = i32;
+
+ extern "rust-call" fn call_once(self, arg: (i32, i32)) -> i32 {
+ arg.0 + arg.1 + self.0
+ }
+}
+
+const GOAL: i32 = {
+ let s = S(1);
+ s(2, 3)
+};
+"#,
+ 6,
+ );
+}
+
+#[test]
+fn closure_capture_unsized_type() {
+ check_number(
+ r#"
+ //- minicore: fn, copy, slice, index, coerce_unsized
+ fn f<T: A>(x: &<T as A>::Ty) -> &<T as A>::Ty {
+ let c = || &*x;
+ c()
+ }
+
+ trait A {
+ type Ty;
+ }
+
+ impl A for i32 {
+ type Ty = [u8];
+ }
+
+ const GOAL: u8 = {
+ let k: &[u8] = &[1, 2, 3];
+ let k = f::<i32>(k);
+ k[0] + k[1] + k[2]
+ }
+ "#,
+ 6,
+ );
+}
+
+#[test]
fn closure_and_impl_fn() {
check_number(
r#"
@@ -1636,6 +1765,24 @@ fn function_pointer_in_constants() {
}
#[test]
+fn function_pointer_and_niche_optimization() {
+ check_number(
+ r#"
+ //- minicore: option
+ const GOAL: i32 = {
+ let f: fn(i32) -> i32 = |x| x + 2;
+ let init = Some(f);
+ match init {
+ Some(t) => t(3),
+ None => 222,
+ }
+ };
+ "#,
+ 5,
+ );
+}
+
+#[test]
fn function_pointer() {
check_number(
r#"
@@ -1663,6 +1810,18 @@ fn function_pointer() {
);
check_number(
r#"
+ fn add2(x: u8) -> u8 {
+ x + 2
+ }
+ const GOAL: u8 = {
+ let plus2 = add2 as fn(u8) -> u8;
+ plus2(3)
+ };
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
//- minicore: coerce_unsized, index, slice
fn add2(x: u8) -> u8 {
x + 2
@@ -1847,6 +2006,65 @@ fn dyn_trait() {
"#,
900,
);
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ trait A {
+ fn x(&self) -> i32;
+ }
+
+ trait B: A {}
+
+ impl A for i32 {
+ fn x(&self) -> i32 {
+ 5
+ }
+ }
+
+ impl B for i32 {
+
+ }
+
+ const fn f(x: &dyn B) -> i32 {
+ x.x()
+ }
+
+ const GOAL: i32 = f(&2i32);
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn coerce_unsized() {
+ check_number(
+ r#"
+//- minicore: coerce_unsized, deref_mut, slice, index, transmute, non_null
+use core::ops::{Deref, DerefMut, CoerceUnsized};
+use core::{marker::Unsize, mem::transmute, ptr::NonNull};
+
+struct ArcInner<T: ?Sized> {
+ strong: usize,
+ weak: usize,
+ data: T,
+}
+
+pub struct Arc<T: ?Sized> {
+ inner: NonNull<ArcInner<T>>,
+}
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
+
+const GOAL: usize = {
+ let x = transmute::<usize, Arc<[i32; 3]>>(12);
+ let y: Arc<[i32]> = x;
+ let z = transmute::<Arc<[i32]>, (usize, usize)>(y);
+ z.1
+};
+
+ "#,
+ 3,
+ );
}
#[test]
@@ -1961,6 +2179,17 @@ fn array_and_index() {
}
#[test]
+fn string() {
+ check_str(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: &str = "hello";
+ "#,
+ "hello",
+ );
+}
+
+#[test]
fn byte_string() {
check_number(
r#"
@@ -2018,6 +2247,57 @@ fn consts() {
"#,
6,
);
+
+ check_number(
+ r#"
+ const F1: i32 = 2147483647;
+ const F2: i32 = F1 - 25;
+ const GOAL: i32 = F2;
+ "#,
+ 2147483622,
+ );
+
+ check_number(
+ r#"
+ const F1: i32 = -2147483648;
+ const F2: i32 = F1 + 18;
+ const GOAL: i32 = F2;
+ "#,
+ -2147483630,
+ );
+
+ check_number(
+ r#"
+ const F1: i32 = 10;
+ const F2: i32 = F1 - 20;
+ const GOAL: i32 = F2;
+ "#,
+ -10,
+ );
+
+ check_number(
+ r#"
+ const F1: i32 = 25;
+ const F2: i32 = F1 - 25;
+ const GOAL: i32 = F2;
+ "#,
+ 0,
+ );
+
+ check_number(
+ r#"
+ const A: i32 = -2147483648;
+ const GOAL: bool = A > 0;
+ "#,
+ 0,
+ );
+
+ check_number(
+ r#"
+ const GOAL: i64 = (-2147483648_i32) as i64;
+ "#,
+ -2147483648,
+ );
}
#[test]
@@ -2116,11 +2396,14 @@ fn const_loop() {
fn const_transfer_memory() {
check_number(
r#"
- const A1: &i32 = &2;
- const A2: &i32 = &5;
- const GOAL: i32 = *A1 + *A2;
+ //- minicore: slice, index, coerce_unsized
+ const A1: &i32 = &1;
+ const A2: &i32 = &10;
+ const A3: [&i32; 3] = [&1, &2, &100];
+ const A4: (i32, &i32) = (1, &1000);
+ const GOAL: i32 = *A1 + *A2 + *A3[2] + *A4.1;
"#,
- 7,
+ 1111,
);
}
@@ -2287,6 +2570,51 @@ fn const_trait_assoc() {
);
check_number(
r#"
+ //- /a/lib.rs crate:a
+ pub trait ToConst {
+ const VAL: usize;
+ }
+ pub const fn to_const<T: ToConst>() -> usize {
+ T::VAL
+ }
+ //- /main.rs crate:main deps:a
+ use a::{ToConst, to_const};
+ struct U0;
+ impl ToConst for U0 {
+ const VAL: usize = 5;
+ }
+ const GOAL: usize = to_const::<U0>();
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ //- minicore: size_of, fn
+ //- /a/lib.rs crate:a
+ use core::mem::size_of;
+ pub struct S<T>(T);
+ impl<T> S<T> {
+ pub const X: usize = {
+ let k: T;
+ let f = || core::mem::size_of::<T>();
+ f()
+ };
+ }
+ //- /main.rs crate:main deps:a
+ use a::{S};
+ trait Tr {
+ type Ty;
+ }
+ impl Tr for i32 {
+ type Ty = u64;
+ }
+ struct K<T: Tr>(<T as Tr>::Ty);
+ const GOAL: usize = S::<K<i32>>::X;
+ "#,
+ 8,
+ );
+ check_number(
+ r#"
struct S<T>(*mut T);
trait MySized: Sized {
@@ -2311,21 +2639,11 @@ fn const_trait_assoc() {
}
#[test]
-fn panic_messages() {
- check_fail(
- r#"
- //- minicore: panic
- const GOAL: u8 = {
- let x: u16 = 2;
- panic!("hello");
- };
- "#,
- |e| e == ConstEvalError::MirEvalError(MirEvalError::Panic("hello".to_string())),
- );
-}
-
-#[test]
fn exec_limits() {
+ if skip_slow_tests() {
+ return;
+ }
+
check_fail(
r#"
const GOAL: usize = loop {};
@@ -2339,7 +2657,7 @@ fn exec_limits() {
}
const GOAL: i32 = f(0);
"#,
- |e| e == ConstEvalError::MirEvalError(MirEvalError::StackOverflow),
+ |e| e == ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded),
);
// Reasonable code should still work
check_number(
@@ -2356,9 +2674,31 @@ fn exec_limits() {
}
sum
}
- const GOAL: i32 = f(10000);
+ const GOAL: i32 = f(1000);
"#,
- 10000 * 10000,
+ 1000 * 1000,
+ );
+}
+
+#[test]
+fn memory_limit() {
+ check_fail(
+ r#"
+ extern "Rust" {
+ #[rustc_allocator]
+ fn __rust_alloc(size: usize, align: usize) -> *mut u8;
+ }
+
+ const GOAL: u8 = unsafe {
+ __rust_alloc(30_000_000_000, 1); // 30GB
+ 2
+ };
+ "#,
+ |e| {
+ e == ConstEvalError::MirEvalError(MirEvalError::Panic(
+ "Memory allocation of 30000000000 bytes failed".to_string(),
+ ))
+ },
);
}
@@ -2377,6 +2717,37 @@ fn type_error() {
}
#[test]
+fn unsized_field() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice, transmute
+ use core::mem::transmute;
+
+ struct Slice([usize]);
+ struct Slice2(Slice);
+
+ impl Slice2 {
+ fn as_inner(&self) -> &Slice {
+ &self.0
+ }
+
+ fn as_bytes(&self) -> &[usize] {
+ &self.as_inner().0
+ }
+ }
+
+ const GOAL: usize = unsafe {
+ let x: &[usize] = &[1, 2, 3];
+ let x: &Slice2 = transmute(x);
+ let x = x.as_bytes();
+ x[0] + x[1] + x[2] + x.len() * 100
+ };
+ "#,
+ 306,
+ );
+}
+
+#[test]
fn unsized_local() {
check_fail(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs
index e05d824db..2855f7890 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs
@@ -15,6 +15,171 @@ fn size_of() {
}
#[test]
+fn size_of_val() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ struct X(i32, u8);
+
+ const GOAL: usize = size_of_val(&X(1, 2));
+ "#,
+ 8,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ const GOAL: usize = {
+ let it: &[i32] = &[1, 2, 3];
+ size_of_val(it)
+ };
+ "#,
+ 12,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, transmute
+ use core::mem::transmute;
+
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ struct X {
+ x: i64,
+ y: u8,
+ t: [i32],
+ }
+
+ const GOAL: usize = unsafe {
+ let y: &X = transmute([0usize, 3]);
+ size_of_val(y)
+ };
+ "#,
+ 24,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, transmute
+ use core::mem::transmute;
+
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ struct X {
+ x: i32,
+ y: i64,
+ t: [u8],
+ }
+
+ const GOAL: usize = unsafe {
+ let y: &X = transmute([0usize, 15]);
+ size_of_val(y)
+ };
+ "#,
+ 32,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, fmt, builtin_impls
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ const GOAL: usize = {
+ let x: &i16 = &5;
+ let y: &dyn core::fmt::Debug = x;
+ let z: &dyn core::fmt::Debug = &y;
+ size_of_val(x) + size_of_val(y) * 10 + size_of_val(z) * 100
+ };
+ "#,
+ 1622,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ const GOAL: usize = {
+ size_of_val("salam")
+ };
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn min_align_of_val() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ struct X(i32, u8);
+
+ const GOAL: usize = min_align_of_val(&X(1, 2));
+ "#,
+ 4,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
+ }
+
+ const GOAL: usize = {
+ let x: &[i32] = &[1, 2, 3];
+ min_align_of_val(x)
+ };
+ "#,
+ 4,
+ );
+}
+
+#[test]
+fn type_name() {
+ check_str(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn type_name<T: ?Sized>() -> &'static str;
+ }
+
+ const GOAL: &str = type_name::<i32>();
+ "#,
+ "i32",
+ );
+ check_str(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn type_name<T: ?Sized>() -> &'static str;
+ }
+
+ mod mod1 {
+ pub mod mod2 {
+ pub struct Ty;
+ }
+ }
+
+ const GOAL: &str = type_name::<mod1::mod2::Ty>();
+ "#,
+ "mod1::mod2::Ty",
+ );
+}
+
+#[test]
fn transmute() {
check_number(
r#"
@@ -29,9 +194,28 @@ fn transmute() {
}
#[test]
+fn read_via_copy() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn read_via_copy<T>(e: *const T) -> T;
+ pub fn volatile_load<T>(e: *const T) -> T;
+ }
+
+ const GOAL: i32 = {
+ let x = 2;
+ read_via_copy(&x) + volatile_load(&x)
+ };
+ "#,
+ 4,
+ );
+}
+
+#[test]
fn const_eval_select() {
check_number(
r#"
+ //- minicore: fn
extern "rust-intrinsic" {
pub fn const_eval_select<ARG, F, G, RET>(arg: ARG, called_in_const: F, called_at_rt: G) -> RET
where
@@ -68,7 +252,29 @@ fn wrapping_add() {
}
#[test]
-fn saturating_add() {
+fn ptr_offset_from() {
+ check_number(
+ r#"
+ //- minicore: index, slice, coerce_unsized
+ extern "rust-intrinsic" {
+ pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;
+ pub fn ptr_offset_from_unsigned<T>(ptr: *const T, base: *const T) -> usize;
+ }
+
+ const GOAL: isize = {
+ let x = [1, 2, 3, 4, 5i32];
+ let r1 = -ptr_offset_from(&x[0], &x[4]);
+ let r2 = ptr_offset_from(&x[3], &x[1]);
+ let r3 = ptr_offset_from_unsigned(&x[3], &x[0]) as isize;
+ r3 * 100 + r2 * 10 + r1
+ };
+ "#,
+ 324,
+ );
+}
+
+#[test]
+fn saturating() {
check_number(
r#"
extern "rust-intrinsic" {
@@ -82,6 +288,16 @@ fn saturating_add() {
check_number(
r#"
extern "rust-intrinsic" {
+ pub fn saturating_sub<T>(a: T, b: T) -> T;
+ }
+
+ const GOAL: bool = saturating_sub(5u8, 7) == 0 && saturating_sub(8u8, 4) == 4;
+ "#,
+ 1,
+ );
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
pub fn saturating_add<T>(a: T, b: T) -> T;
}
@@ -112,6 +328,7 @@ fn allocator() {
*ptr = 23;
*ptr2 = 32;
let ptr = __rust_realloc(ptr, 4, 1, 8);
+ let ptr = __rust_realloc(ptr, 8, 1, 3);
let ptr2 = ((ptr as usize) + 1) as *mut u8;
*ptr + *ptr2
};
@@ -160,6 +377,24 @@ fn needs_drop() {
}
#[test]
+fn discriminant_value() {
+ check_number(
+ r#"
+ //- minicore: discriminant, option
+ use core::marker::DiscriminantKind;
+ extern "rust-intrinsic" {
+ pub fn discriminant_value<T>(v: &T) -> <T as DiscriminantKind>::Discriminant;
+ }
+ const GOAL: bool = {
+ discriminant_value(&Some(2i32)) == discriminant_value(&Some(5i32))
+ && discriminant_value(&Some(2i32)) != discriminant_value(&None::<i32>)
+ };
+ "#,
+ 1,
+ );
+}
+
+#[test]
fn likely() {
check_number(
r#"
@@ -225,6 +460,8 @@ fn atomic() {
pub fn atomic_nand_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_or_release<T: Copy>(dst: *mut T, src: T) -> T;
pub fn atomic_xor_seqcst<T: Copy>(dst: *mut T, src: T) -> T;
+ pub fn atomic_fence_seqcst();
+ pub fn atomic_singlethreadfence_acqrel();
}
fn should_not_reach() {
@@ -239,6 +476,7 @@ fn atomic() {
if (30, true) != atomic_cxchg_release_seqcst(&mut y, 30, 40) {
should_not_reach();
}
+ atomic_fence_seqcst();
if (40, false) != atomic_cxchg_release_seqcst(&mut y, 30, 50) {
should_not_reach();
}
@@ -246,6 +484,7 @@ fn atomic() {
should_not_reach();
}
let mut z = atomic_xsub_seqcst(&mut x, -200);
+ atomic_singlethreadfence_acqrel();
atomic_xor_seqcst(&mut x, 1024);
atomic_load_seqcst(&x) + z * 3 + atomic_load_seqcst(&y) * 2
};
@@ -328,6 +567,24 @@ fn copy_nonoverlapping() {
}
#[test]
+fn write_bytes() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
+ }
+
+ const GOAL: i32 = unsafe {
+ let mut x = 2;
+ write_bytes(&mut x, 5, 1);
+ x
+ };
+ "#,
+ 0x05050505,
+ );
+}
+
+#[test]
fn copy() {
check_number(
r#"
@@ -363,6 +620,20 @@ fn ctpop() {
}
#[test]
+fn ctlz() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn ctlz<T: Copy>(x: T) -> T;
+ }
+
+ const GOAL: u8 = ctlz(0b0001_1100_u8);
+ "#,
+ 3,
+ );
+}
+
+#[test]
fn cttz() {
check_number(
r#"
@@ -375,3 +646,85 @@ fn cttz() {
3,
);
}
+
+#[test]
+fn rotate() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn rotate_left<T: Copy>(x: T, y: T) -> T;
+ }
+
+ const GOAL: i64 = rotate_left(0xaa00000000006e1i64, 12);
+ "#,
+ 0x6e10aa,
+ );
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn rotate_right<T: Copy>(x: T, y: T) -> T;
+ }
+
+ const GOAL: i64 = rotate_right(0x6e10aa, 12);
+ "#,
+ 0xaa00000000006e1,
+ );
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn rotate_left<T: Copy>(x: T, y: T) -> T;
+ }
+
+ const GOAL: i8 = rotate_left(129, 2);
+ "#,
+ 6,
+ );
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn rotate_right<T: Copy>(x: T, y: T) -> T;
+ }
+
+ const GOAL: i32 = rotate_right(10006016, 1020315);
+ "#,
+ 320192512,
+ );
+}
+
+#[test]
+fn simd() {
+ check_number(
+ r#"
+ pub struct i8x16(
+ i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,
+ );
+ extern "platform-intrinsic" {
+ pub fn simd_bitmask<T, U>(x: T) -> U;
+ }
+ const GOAL: u16 = simd_bitmask(i8x16(
+ 0, 1, 0, 0, 2, 255, 100, 0, 50, 0, 1, 1, 0, 0, 0, 0
+ ));
+ "#,
+ 0b0000110101110010,
+ );
+ check_number(
+ r#"
+ pub struct i8x16(
+ i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,
+ );
+ extern "platform-intrinsic" {
+ pub fn simd_lt<T, U>(x: T, y: T) -> U;
+ pub fn simd_bitmask<T, U>(x: T) -> U;
+ }
+ const GOAL: u16 = simd_bitmask(simd_lt::<i8x16, i8x16>(
+ i8x16(
+ -105, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10
+ ),
+ i8x16(
+ -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11
+ ),
+ ));
+ "#,
+ 0xFFFF,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 9dd810f84..9c96b5ab8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -77,8 +77,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
- fn const_eval(&self, def: GeneralConstId, subst: Substitution)
- -> Result<Const, ConstEvalError>;
+ fn const_eval(
+ &self,
+ def: GeneralConstId,
+ subst: Substitution,
+ trait_env: Option<Arc<crate::TraitEnvironment>>,
+ ) -> Result<Const, ConstEvalError>;
#[salsa::invoke(crate::consteval::const_eval_static_query)]
#[salsa::cycle(crate::consteval::const_eval_static_recover)]
@@ -100,16 +104,28 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
&self,
def: AdtId,
subst: Substitution,
- krate: CrateId,
+ env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::layout_of_ty_query)]
#[salsa::cycle(crate::layout::layout_of_ty_recover)]
- fn layout_of_ty(&self, ty: Ty, krate: CrateId) -> Result<Arc<Layout>, LayoutError>;
+ fn layout_of_ty(
+ &self,
+ ty: Ty,
+ env: Arc<crate::TraitEnvironment>,
+ ) -> Result<Arc<Layout>, LayoutError>;
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
+ #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
+ fn lookup_impl_method(
+ &self,
+ env: Arc<crate::TraitEnvironment>,
+ func: FunctionId,
+ fn_subst: Substitution,
+ ) -> (FunctionId, Substitution);
+
#[salsa::invoke(crate::lower::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
index 4b147b997..ef43ed5c4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -5,7 +5,7 @@ mod unsafe_check;
mod decl_check;
pub use crate::diagnostics::{
- decl_check::{incorrect_case, IncorrectCase},
+ decl_check::{incorrect_case, CaseType, IncorrectCase},
expr::{
record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic,
},
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index 1233469b9..a94a962c1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -14,13 +14,12 @@ mod case_conv;
use std::fmt;
-use base_db::CrateId;
use hir_def::{
data::adt::VariantData,
hir::{Pat, PatId},
src::HasSource,
- AdtId, AttrDefId, ConstId, EnumId, FunctionId, ItemContainerId, Lookup, ModuleDefId, StaticId,
- StructId,
+ AdtId, AttrDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, ItemContainerId,
+ Lookup, ModuleDefId, StaticId, StructId,
};
use hir_expand::{
name::{AsName, Name},
@@ -44,24 +43,20 @@ mod allow {
pub(super) const NON_CAMEL_CASE_TYPES: &str = "non_camel_case_types";
}
-pub fn incorrect_case(
- db: &dyn HirDatabase,
- krate: CrateId,
- owner: ModuleDefId,
-) -> Vec<IncorrectCase> {
+pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
let _p = profile::span("validate_module_item");
- let mut validator = DeclValidator::new(db, krate);
+ let mut validator = DeclValidator::new(db);
validator.validate_item(owner);
validator.sink
}
#[derive(Debug)]
pub enum CaseType {
- // `some_var`
+ /// `some_var`
LowerSnakeCase,
- // `SOME_CONST`
+ /// `SOME_CONST`
UpperSnakeCase,
- // `SomeStruct`
+ /// `SomeStruct`
UpperCamelCase,
}
@@ -120,7 +115,6 @@ pub struct IncorrectCase {
pub(super) struct DeclValidator<'a> {
db: &'a dyn HirDatabase,
- krate: CrateId,
pub(super) sink: Vec<IncorrectCase>,
}
@@ -132,8 +126,8 @@ struct Replacement {
}
impl<'a> DeclValidator<'a> {
- pub(super) fn new(db: &'a dyn HirDatabase, krate: CrateId) -> DeclValidator<'a> {
- DeclValidator { db, krate, sink: Vec::new() }
+ pub(super) fn new(db: &'a dyn HirDatabase) -> DeclValidator<'a> {
+ DeclValidator { db, sink: Vec::new() }
}
pub(super) fn validate_item(&mut self, item: ModuleDefId) {
@@ -181,6 +175,8 @@ impl<'a> DeclValidator<'a> {
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()),
AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
+ AttrDefId::ExternCrateId(id) => Some(id.lookup(self.db.upcast()).container.into()),
+ AttrDefId::UseId(id) => Some(id.lookup(self.db.upcast()).container.into()),
// These warnings should not explore macro definitions at all
AttrDefId::MacroId(_) => None,
AttrDefId::AdtId(aid) => match aid {
@@ -194,8 +190,7 @@ impl<'a> DeclValidator<'a> {
AttrDefId::TypeAliasId(_) => None,
AttrDefId::GenericParamId(_) => None,
}
- .map(|mid| self.allowed(mid, allow_name, true))
- .unwrap_or(false)
+ .is_some_and(|mid| self.allowed(mid, allow_name, true))
}
fn validate_func(&mut self, func: FunctionId) {
@@ -205,17 +200,7 @@ impl<'a> DeclValidator<'a> {
return;
}
- let body = self.db.body(func.into());
-
- // Recursively validate inner scope items, such as static variables and constants.
- for (_, block_def_map) in body.blocks(self.db.upcast()) {
- for (_, module) in block_def_map.modules() {
- for def_id in module.scope.declarations() {
- let mut validator = DeclValidator::new(self.db, self.krate);
- validator.validate_item(def_id);
- }
- }
- }
+ self.validate_body_inner_items(func.into());
// Check whether non-snake case identifiers are allowed for this function.
if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
@@ -230,6 +215,8 @@ impl<'a> DeclValidator<'a> {
expected_case: CaseType::LowerSnakeCase,
});
+ let body = self.db.body(func.into());
+
// Check the patterns inside the function body.
// This includes function parameters.
let pats_replacements = body
@@ -495,6 +482,11 @@ impl<'a> DeclValidator<'a> {
fn validate_enum(&mut self, enum_id: EnumId) {
let data = self.db.enum_data(enum_id);
+ for (local_id, _) in data.variants.iter() {
+ let variant_id = EnumVariantId { parent: enum_id, local_id };
+ self.validate_body_inner_items(variant_id.into());
+ }
+
// Check whether non-camel case names are allowed for this enum.
if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
@@ -511,13 +503,11 @@ impl<'a> DeclValidator<'a> {
// Check the field names.
let enum_fields_replacements = data
.variants
- .iter()
- .filter_map(|(_, variant)| {
+ .values()
+ .filter_map(|variant| {
Some(Replacement {
current_name: variant.name.clone(),
- suggested_text: to_camel_case(
- &variant.name.display(self.db.upcast()).to_string(),
- )?,
+ suggested_text: to_camel_case(&variant.name.to_smol_str())?,
expected_case: CaseType::UpperCamelCase,
})
})
@@ -621,6 +611,8 @@ impl<'a> DeclValidator<'a> {
fn validate_const(&mut self, const_id: ConstId) {
let data = self.db.const_data(const_id);
+ self.validate_body_inner_items(const_id.into());
+
if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
@@ -630,7 +622,7 @@ impl<'a> DeclValidator<'a> {
None => return,
};
- let const_name = name.display(self.db.upcast()).to_string();
+ let const_name = name.to_smol_str();
let replacement = if let Some(new_name) = to_upper_snake_case(&const_name) {
Replacement {
current_name: name.clone(),
@@ -669,13 +661,15 @@ impl<'a> DeclValidator<'a> {
return;
}
+ self.validate_body_inner_items(static_id.into());
+
if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
let name = &data.name;
- let static_name = name.display(self.db.upcast()).to_string();
+ let static_name = name.to_smol_str();
let replacement = if let Some(new_name) = to_upper_snake_case(&static_name) {
Replacement {
current_name: name.clone(),
@@ -706,4 +700,17 @@ impl<'a> DeclValidator<'a> {
self.sink.push(diagnostic);
}
+
+ // FIXME: We don't currently validate names within `DefWithBodyId::InTypeConstId`.
+ /// Recursively validates inner scope items, such as static variables and constants.
+ fn validate_body_inner_items(&mut self, body_id: DefWithBodyId) {
+ let body = self.db.body(body_id);
+ for (_, block_def_map) in body.blocks(self.db.upcast()) {
+ for (_, module) in block_def_map.modules() {
+ for def_id in module.scope.declarations() {
+ self.validate_item(def_id);
+ }
+ }
+ }
+ }
}
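With `validate_body_inner_items`, items declared inside function, const, static, and enum-variant bodies are now case-checked recursively instead of only at module level. A standalone example of code the validator would now walk into (the names are illustrative):

    const OUTER: i32 = {
        // An item nested inside a const body; the validator now recurses into
        // this block and would suggest renaming `inner_value` to `INNER_VALUE`.
        const inner_value: i32 = 1;
        inner_value
    };

    fn main() {
        println!("{OUTER}");
    }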
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index c1df24d17..1b4ee4613 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -29,6 +29,7 @@ use itertools::Itertools;
use la_arena::ArenaMap;
use smallvec::SmallVec;
use stdx::never;
+use triomphe::Arc;
use crate::{
consteval::try_const_usize,
@@ -43,26 +44,19 @@ use crate::{
AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
- Substitution, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
+ Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
};
pub trait HirWrite: fmt::Write {
- fn start_location_link(&mut self, location: ModuleDefId);
- fn end_location_link(&mut self);
+ fn start_location_link(&mut self, _location: ModuleDefId) {}
+ fn end_location_link(&mut self) {}
}
// String will ignore link metadata
-impl HirWrite for String {
- fn start_location_link(&mut self, _: ModuleDefId) {}
-
- fn end_location_link(&mut self) {}
-}
+impl HirWrite for String {}
// `core::Formatter` will ignore metadata
-impl HirWrite for fmt::Formatter<'_> {
- fn start_location_link(&mut self, _: ModuleDefId) {}
- fn end_location_link(&mut self) {}
-}
+impl HirWrite for fmt::Formatter<'_> {}
pub struct HirFormatter<'a> {
pub db: &'a dyn HirDatabase,
@@ -192,7 +186,7 @@ pub trait HirDisplay {
}
}
-impl<'a> HirFormatter<'a> {
+impl HirFormatter<'_> {
pub fn write_joined<T: HirDisplay>(
&mut self,
iter: impl IntoIterator<Item = T>,
@@ -342,7 +336,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
}
}
-impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
+impl<T> fmt::Display for HirDisplayWrapper<'_, T>
where
T: HirDisplay,
{
@@ -360,7 +354,7 @@ where
const TYPE_HINT_TRUNCATION: &str = "…";
-impl<T: HirDisplay> HirDisplay for &'_ T {
+impl<T: HirDisplay> HirDisplay for &T {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
HirDisplay::hir_fmt(*self, f)
}
@@ -446,28 +440,6 @@ impl HirDisplay for Const {
}
}
-pub struct HexifiedConst(pub Const);
-
-impl HirDisplay for HexifiedConst {
- fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
- let data = &self.0.data(Interner);
- if let TyKind::Scalar(s) = data.ty.kind(Interner) {
- if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
- if let ConstValue::Concrete(c) = &data.value {
- if let ConstScalar::Bytes(b, m) = &c.interned {
- let value = u128::from_le_bytes(pad16(b, false));
- if value >= 10 {
- render_const_scalar(f, &b, m, &data.ty)?;
- return write!(f, " ({:#X})", value);
- }
- }
- }
- }
- }
- self.0.hir_fmt(f)
- }
-}
-
fn render_const_scalar(
f: &mut HirFormatter<'_>,
b: &[u8],
@@ -476,33 +448,35 @@ fn render_const_scalar(
) -> Result<(), HirDisplayError> {
// FIXME: We need to get krate from the final callers of the hir display
// infrastructure and have it here as a field on `f`.
- let krate = *f.db.crate_graph().crates_in_topological_order().last().unwrap();
+ let trait_env = Arc::new(TraitEnvironment::empty(
+ *f.db.crate_graph().crates_in_topological_order().last().unwrap(),
+ ));
match ty.kind(Interner) {
TyKind::Scalar(s) => match s {
Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
Scalar::Char => {
- let x = u128::from_le_bytes(pad16(b, false)) as u32;
- let Ok(c) = char::try_from(x) else {
+ let it = u128::from_le_bytes(pad16(b, false)) as u32;
+ let Ok(c) = char::try_from(it) else {
return f.write_str("<unicode-error>");
};
write!(f, "{c:?}")
}
Scalar::Int(_) => {
- let x = i128::from_le_bytes(pad16(b, true));
- write!(f, "{x}")
+ let it = i128::from_le_bytes(pad16(b, true));
+ write!(f, "{it}")
}
Scalar::Uint(_) => {
- let x = u128::from_le_bytes(pad16(b, false));
- write!(f, "{x}")
+ let it = u128::from_le_bytes(pad16(b, false));
+ write!(f, "{it}")
}
Scalar::Float(fl) => match fl {
chalk_ir::FloatTy::F32 => {
- let x = f32::from_le_bytes(b.try_into().unwrap());
- write!(f, "{x:?}")
+ let it = f32::from_le_bytes(b.try_into().unwrap());
+ write!(f, "{it:?}")
}
chalk_ir::FloatTy::F64 => {
- let x = f64::from_le_bytes(b.try_into().unwrap());
- write!(f, "{x:?}")
+ let it = f64::from_le_bytes(b.try_into().unwrap());
+ write!(f, "{it:?}")
}
},
},
@@ -519,7 +493,7 @@ fn render_const_scalar(
TyKind::Slice(ty) => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let count = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
@@ -545,7 +519,7 @@ fn render_const_scalar(
let Ok(t) = memory_map.vtable.ty(ty_id) else {
return f.write_str("<ty-missing-in-vtable-map>");
};
- let Ok(layout) = f.db.layout_of_ty(t.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@@ -577,7 +551,7 @@ fn render_const_scalar(
return f.write_str("<layout-error>");
}
});
- let Ok(layout) = f.db.layout_of_ty(t.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@@ -589,7 +563,7 @@ fn render_const_scalar(
}
},
TyKind::Tuple(_, subst) => {
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else {
return f.write_str("<layout-error>");
};
f.write_str("(")?;
@@ -602,7 +576,7 @@ fn render_const_scalar(
}
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
let offset = layout.fields.offset(id).bytes_usize();
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else {
f.write_str("<layout-error>")?;
continue;
};
@@ -612,7 +586,7 @@ fn render_const_scalar(
f.write_str(")")
}
TyKind::Adt(adt, subst) => {
- let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone(), trait_env.clone()) else {
return f.write_str("<layout-error>");
};
match adt.0 {
@@ -624,7 +598,7 @@ fn render_const_scalar(
&data.variant_data,
f,
&field_types,
- adt.0.module(f.db.upcast()).krate(),
+ f.db.trait_environment(adt.0.into()),
&layout,
subst,
b,
@@ -636,7 +610,8 @@ fn render_const_scalar(
}
hir_def::AdtId::EnumId(e) => {
let Some((var_id, var_layout)) =
- detect_variant_from_bytes(&layout, f.db, krate, b, e) else {
+ detect_variant_from_bytes(&layout, f.db, trait_env.clone(), b, e)
+ else {
return f.write_str("<failed-to-detect-variant>");
};
let data = &f.db.enum_data(e).variants[var_id];
@@ -647,7 +622,7 @@ fn render_const_scalar(
&data.variant_data,
f,
&field_types,
- adt.0.module(f.db.upcast()).krate(),
+ f.db.trait_environment(adt.0.into()),
&var_layout,
subst,
b,
@@ -658,15 +633,15 @@ fn render_const_scalar(
}
TyKind::FnDef(..) => ty.hir_fmt(f),
TyKind::Function(_) | TyKind::Raw(_, _) => {
- let x = u128::from_le_bytes(pad16(b, false));
- write!(f, "{:#X} as ", x)?;
+ let it = u128::from_le_bytes(pad16(b, false));
+ write!(f, "{:#X} as ", it)?;
ty.hir_fmt(f)
}
TyKind::Array(ty, len) => {
let Some(len) = try_const_usize(f.db, len) else {
return f.write_str("<unknown-array-len>");
};
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env) else {
return f.write_str("<layout-error>");
};
let size_one = layout.size.bytes_usize();
@@ -705,7 +680,7 @@ fn render_variant_after_name(
data: &VariantData,
f: &mut HirFormatter<'_>,
field_types: &ArenaMap<LocalFieldId, Binders<Ty>>,
- krate: CrateId,
+ trait_env: Arc<TraitEnvironment>,
layout: &Layout,
subst: &Substitution,
b: &[u8],
@@ -716,7 +691,7 @@ fn render_variant_after_name(
let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| {
let offset = layout.fields.offset(u32::from(id.into_raw()) as usize).bytes_usize();
let ty = field_types[id].clone().substitute(Interner, subst);
- let Ok(layout) = f.db.layout_of_ty(ty.clone(), krate) else {
+ let Ok(layout) = f.db.layout_of_ty(ty.clone(), trait_env.clone()) else {
return f.write_str("<layout-error>");
};
let size = layout.size.bytes_usize();
@@ -735,7 +710,7 @@ fn render_variant_after_name(
}
write!(f, " }}")?;
} else {
- let mut it = it.map(|x| x.0);
+ let mut it = it.map(|it| it.0);
write!(f, "(")?;
if let Some(id) = it.next() {
render_field(f, id)?;
@@ -903,6 +878,13 @@ impl HirDisplay for Ty {
TyKind::FnDef(def, parameters) => {
let def = from_chalk(db, *def);
let sig = db.callable_item_signature(def).substitute(Interner, parameters);
+
+ if f.display_target.is_source_code() {
+ // `FnDef` is anonymous and there's no surface syntax for it. Show it as a
+ // function pointer type.
+ return sig.hir_fmt(f);
+ }
+
f.start_location_link(def.into());
match def {
CallableDefId::FunctionId(ff) => {
@@ -1277,19 +1259,20 @@ fn hir_fmt_generics(
i: usize,
parameters: &Substitution,
) -> bool {
- if parameter.ty(Interner).map(|x| x.kind(Interner)) == Some(&TyKind::Error)
+ if parameter.ty(Interner).map(|it| it.kind(Interner))
+ == Some(&TyKind::Error)
{
return true;
}
if let Some(ConstValue::Concrete(c)) =
- parameter.constant(Interner).map(|x| &x.data(Interner).value)
+ parameter.constant(Interner).map(|it| &it.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
}
}
let default_parameter = match default_parameters.get(i) {
- Some(x) => x,
+ Some(it) => it,
None => return true,
};
let actual_default =
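The `is_source_code()` special case exists because a `FnDef` type has no surface syntax: when a type has to be rendered into actual source text (for example by an assist that inserts a type annotation), the nearest writable type is the function pointer signature. A plain-Rust illustration of the distinction:

    fn double(x: i32) -> i32 {
        x * 2
    }

    fn main() {
        // `f` has the anonymous, zero-sized FnDef type of `double`; that type
        // cannot be written down in source.
        let f = double;
        // The nearest type with surface syntax is the function pointer
        // signature, which is what source-code display now falls back to.
        let g: fn(i32) -> i32 = double;
        assert_eq!(f(3), g(3));
        assert_eq!(std::mem::size_of_val(&f), 0);
    }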
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 1ac0837b5..b4915dbf0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -13,6 +13,15 @@
//! to certain types. To record this, we use the union-find implementation from
//! the `ena` crate, which is extracted from rustc.
+mod cast;
+pub(crate) mod closure;
+mod coerce;
+mod expr;
+mod mutability;
+mod pat;
+mod path;
+pub(crate) mod unify;
+
use std::{convert::identity, ops::Index};
use chalk_ir::{
@@ -60,15 +69,8 @@ pub use coerce::could_coerce;
#[allow(unreachable_pub)]
pub use unify::could_unify;
-pub(crate) use self::closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
-
-pub(crate) mod unify;
-mod path;
-mod expr;
-mod pat;
-mod coerce;
-pub(crate) mod closure;
-mod mutability;
+use cast::CastCheck;
+pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
@@ -290,7 +292,7 @@ impl Default for InternedStandardTypes {
/// ```
///
/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
-/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
+/// E.g., `struct Foo<T> { it: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
/// The autoderef and -ref are the same as in the above example, but the type
/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
/// the underlying conversions from `[i32; 4]` to `[i32]`.
@@ -508,6 +510,8 @@ pub(crate) struct InferenceContext<'a> {
diverges: Diverges,
breakables: Vec<BreakableContext>,
+ deferred_cast_checks: Vec<CastCheck>,
+
// fields related to closure capture
current_captures: Vec<CapturedItemWithoutTy>,
current_closure: Option<ClosureId>,
@@ -582,7 +586,8 @@ impl<'a> InferenceContext<'a> {
resolver,
diverges: Diverges::Maybe,
breakables: Vec::new(),
- current_captures: vec![],
+ deferred_cast_checks: Vec::new(),
+ current_captures: Vec::new(),
current_closure: None,
deferred_closures: FxHashMap::default(),
closure_dependencies: FxHashMap::default(),
@@ -594,7 +599,7 @@ impl<'a> InferenceContext<'a> {
// used this function for another workaround, mention it here. If you really need this function and believe that
// there is no problem in it being `pub(crate)`, remove this comment.
pub(crate) fn resolve_all(self) -> InferenceResult {
- let InferenceContext { mut table, mut result, .. } = self;
+ let InferenceContext { mut table, mut result, deferred_cast_checks, .. } = self;
// Destructure every single field so whenever new fields are added to `InferenceResult` we
// don't forget to handle them here.
let InferenceResult {
@@ -622,6 +627,13 @@ impl<'a> InferenceContext<'a> {
table.fallback_if_possible();
+ // Comment from rustc:
+ // Even though coercion casts provide type hints, we check casts after fallback for
+ // backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
+ for cast in deferred_cast_checks {
+ cast.check(&mut table);
+ }
+
// FIXME resolve obligations as well (use Guidance if necessary)
table.resolve_obligations_as_possible();
@@ -1172,7 +1184,7 @@ impl<'a> InferenceContext<'a> {
unresolved: Option<usize>,
path: &ModPath,
) -> (Ty, Option<VariantId>) {
- let remaining = unresolved.map(|x| path.segments()[x..].len()).filter(|x| x > &0);
+ let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0);
match remaining {
None => {
let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
@@ -1232,7 +1244,9 @@ impl<'a> InferenceContext<'a> {
.as_function()?
.lookup(self.db.upcast())
.container
- else { return None };
+ else {
+ return None;
+ };
self.resolve_output_on(trait_)
}
@@ -1322,7 +1336,7 @@ impl Expectation {
/// The primary use case is where the expected type is a fat pointer,
/// like `&[isize]`. For example, consider the following statement:
///
- /// let x: &[isize] = &[1, 2, 3];
+ /// let it: &[isize] = &[1, 2, 3];
///
/// In this case, the expected type for the `&[1, 2, 3]` expression is
/// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
new file mode 100644
index 000000000..9e1c74b16
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/cast.rs
@@ -0,0 +1,46 @@
+//! Type cast logic. Basically coercion + additional casts.
+
+use crate::{infer::unify::InferenceTable, Interner, Ty, TyExt, TyKind};
+
+#[derive(Clone, Debug)]
+pub(super) struct CastCheck {
+ expr_ty: Ty,
+ cast_ty: Ty,
+}
+
+impl CastCheck {
+ pub(super) fn new(expr_ty: Ty, cast_ty: Ty) -> Self {
+ Self { expr_ty, cast_ty }
+ }
+
+ pub(super) fn check(self, table: &mut InferenceTable<'_>) {
+ // FIXME: This function currently only implements the bits that influence the type
+ // inference. We should return the adjustments on success and report diagnostics on error.
+ let expr_ty = table.resolve_ty_shallow(&self.expr_ty);
+ let cast_ty = table.resolve_ty_shallow(&self.cast_ty);
+
+ if expr_ty.contains_unknown() || cast_ty.contains_unknown() {
+ return;
+ }
+
+ if table.coerce(&expr_ty, &cast_ty).is_ok() {
+ return;
+ }
+
+ if check_ref_to_ptr_cast(expr_ty, cast_ty, table) {
+ // Note that this type of cast is actually split into a coercion to a
+ // pointer type and a cast:
+ // &[T; N] -> *[T; N] -> *T
+ return;
+ }
+
+ // FIXME: Check other kinds of non-coercion casts and report error if any?
+ }
+}
+
+fn check_ref_to_ptr_cast(expr_ty: Ty, cast_ty: Ty, table: &mut InferenceTable<'_>) -> bool {
+ let Some((expr_inner_ty, _, _)) = expr_ty.as_reference() else { return false; };
+ let Some((cast_inner_ty, _)) = cast_ty.as_raw_ptr() else { return false; };
+ let TyKind::Array(expr_elt_ty, _) = expr_inner_ty.kind(Interner) else { return false; };
+ table.coerce(expr_elt_ty, cast_inner_ty).is_ok()
+}
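The `&[T; N] -> *T` shape that check_ref_to_ptr_cast accepts looks like this in source code (illustrative example, not taken from the patch):

    fn first_elem_ptr(arr: &[u8; 4]) -> *const u8 {
        // conceptually a coercion `&[u8; 4] -> *const [u8; 4]` followed by the
        // raw pointer cast `*const [u8; 4] -> *const u8`
        arr as *const u8
    }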
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
index ff64ae252..1781f6c58 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -139,7 +139,7 @@ impl HirPlace {
) -> CaptureKind {
match current_capture {
CaptureKind::ByRef(BorrowKind::Mut { .. }) => {
- if self.projections[len..].iter().any(|x| *x == ProjectionElem::Deref) {
+ if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) {
current_capture = CaptureKind::ByRef(BorrowKind::Unique);
}
}
@@ -199,7 +199,7 @@ impl CapturedItem {
.to_string(),
VariantData::Tuple(fields) => fields
.iter()
- .position(|x| x.0 == f.local_id)
+ .position(|it| it.0 == f.local_id)
.unwrap_or_default()
.to_string(),
VariantData::Unit => "[missing field]".to_string(),
@@ -439,10 +439,10 @@ impl InferenceContext<'_> {
}
fn walk_expr(&mut self, tgt_expr: ExprId) {
- if let Some(x) = self.result.expr_adjustments.get_mut(&tgt_expr) {
+ if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) {
// FIXME: this take is completely unneeded, and just is here to make borrow checker
// happy. Remove it if you can.
- let x_taken = mem::take(x);
+ let x_taken = mem::take(it);
self.walk_expr_with_adjust(tgt_expr, &x_taken);
*self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken;
} else {
@@ -488,10 +488,6 @@ impl InferenceContext<'_> {
self.consume_expr(*tail);
}
}
- Expr::While { condition, body, label: _ } => {
- self.consume_expr(*condition);
- self.consume_expr(*body);
- }
Expr::Call { callee, args, is_assignee_expr: _ } => {
self.consume_expr(*callee);
self.consume_exprs(args.iter().copied());
@@ -536,7 +532,7 @@ impl InferenceContext<'_> {
if let &Some(expr) = spread {
self.consume_expr(expr);
}
- self.consume_exprs(fields.iter().map(|x| x.expr));
+ self.consume_exprs(fields.iter().map(|it| it.expr));
}
Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
@@ -548,7 +544,7 @@ impl InferenceContext<'_> {
} else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
let mutability = 'b: {
if let Some(deref_trait) =
- self.resolve_lang_item(LangItem::DerefMut).and_then(|x| x.as_trait())
+ self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
{
if let Some(deref_fn) =
self.db.trait_data(deref_trait).method_by_name(&name![deref_mut])
@@ -615,8 +611,8 @@ impl InferenceContext<'_> {
"We sort closures, so we should always have data for inner closures",
);
let mut cc = mem::take(&mut self.current_captures);
- cc.extend(captures.iter().filter(|x| self.is_upvar(&x.place)).map(|x| {
- CapturedItemWithoutTy { place: x.place.clone(), kind: x.kind, span: x.span }
+ cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
+ CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span }
}));
self.current_captures = cc;
}
@@ -694,7 +690,7 @@ impl InferenceContext<'_> {
},
},
}
- if self.result.pat_adjustments.get(&p).map_or(false, |x| !x.is_empty()) {
+ if self.result.pat_adjustments.get(&p).map_or(false, |it| !it.is_empty()) {
for_mut = BorrowKind::Unique;
}
self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
@@ -706,9 +702,9 @@ impl InferenceContext<'_> {
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
let mut ty = None;
- if let Some(x) = self.result.expr_adjustments.get(&e) {
- if let Some(x) = x.last() {
- ty = Some(x.target.clone());
+ if let Some(it) = self.result.expr_adjustments.get(&e) {
+ if let Some(it) = it.last() {
+ ty = Some(it.target.clone());
}
}
ty.unwrap_or_else(|| self.expr_ty(e))
@@ -727,7 +723,7 @@ impl InferenceContext<'_> {
// FIXME: We handle closure as a special case, since chalk consider every closure as copy. We
// should probably let chalk know which closures are copy, but I don't know how doing it
// without creating query cycles.
- return self.result.closure_info.get(id).map(|x| x.1 == FnTrait::Fn).unwrap_or(true);
+ return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true);
}
self.table.resolve_completely(ty).is_copy(self.db, self.owner)
}
@@ -748,7 +744,7 @@ impl InferenceContext<'_> {
}
fn minimize_captures(&mut self) {
- self.current_captures.sort_by_key(|x| x.place.projections.len());
+ self.current_captures.sort_by_key(|it| it.place.projections.len());
let mut hash_map = HashMap::<HirPlace, usize>::new();
let result = mem::take(&mut self.current_captures);
for item in result {
@@ -759,7 +755,7 @@ impl InferenceContext<'_> {
break Some(*k);
}
match it.next() {
- Some(x) => lookup_place.projections.push(x.clone()),
+ Some(it) => lookup_place.projections.push(it.clone()),
None => break None,
}
};
@@ -780,7 +776,7 @@ impl InferenceContext<'_> {
}
fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) {
- let cnt = self.result.pat_adjustments.get(&pat).map(|x| x.len()).unwrap_or_default();
+ let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default();
place.projections = place
.projections
.iter()
@@ -894,10 +890,10 @@ impl InferenceContext<'_> {
fn closure_kind(&self) -> FnTrait {
let mut r = FnTrait::Fn;
- for x in &self.current_captures {
+ for it in &self.current_captures {
r = cmp::min(
r,
- match &x.kind {
+ match &it.kind {
CaptureKind::ByRef(BorrowKind::Unique | BorrowKind::Mut { .. }) => {
FnTrait::FnMut
}
@@ -933,7 +929,7 @@ impl InferenceContext<'_> {
}
self.minimize_captures();
let result = mem::take(&mut self.current_captures);
- let captures = result.into_iter().map(|x| x.with_ty(self)).collect::<Vec<_>>();
+ let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
self.result.closure_info.insert(closure, (captures, closure_kind));
closure_kind
}
@@ -973,20 +969,20 @@ impl InferenceContext<'_> {
fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> {
let mut deferred_closures = mem::take(&mut self.deferred_closures);
let mut dependents_count: FxHashMap<ClosureId, usize> =
- deferred_closures.keys().map(|x| (*x, 0)).collect();
+ deferred_closures.keys().map(|it| (*it, 0)).collect();
for (_, deps) in &self.closure_dependencies {
for dep in deps {
*dependents_count.entry(*dep).or_default() += 1;
}
}
let mut queue: Vec<_> =
- deferred_closures.keys().copied().filter(|x| dependents_count[x] == 0).collect();
+ deferred_closures.keys().copied().filter(|it| dependents_count[it] == 0).collect();
let mut result = vec![];
- while let Some(x) = queue.pop() {
- if let Some(d) = deferred_closures.remove(&x) {
- result.push((x, d));
+ while let Some(it) = queue.pop() {
+ if let Some(d) = deferred_closures.remove(&it) {
+ result.push((it, d));
}
- for dep in self.closure_dependencies.get(&x).into_iter().flat_map(|x| x.iter()) {
+ for dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) {
let cnt = dependents_count.get_mut(dep).unwrap();
*cnt -= 1;
if *cnt == 0 {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
index 05a476f63..8e7e62c49 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -220,7 +220,7 @@ pub(crate) fn coerce(
Ok((adjustments, table.resolve_with_fallback(ty, &fallback)))
}
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
/// Unify two types, but may coerce the first one to the second one
/// using "implicit coercion rules" if needed.
pub(super) fn coerce(
@@ -239,7 +239,7 @@ impl<'a> InferenceContext<'a> {
}
}
-impl<'a> InferenceTable<'a> {
+impl InferenceTable<'_> {
/// Unify two types, but may coerce the first one to the second one
/// using "implicit coercion rules" if needed.
pub(crate) fn coerce(
@@ -377,7 +377,7 @@ impl<'a> InferenceTable<'a> {
let snapshot = self.snapshot();
- let mut autoderef = Autoderef::new(self, from_ty.clone());
+ let mut autoderef = Autoderef::new(self, from_ty.clone(), false);
let mut first_error = None;
let mut found = None;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 194471f00..8cbdae625 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -46,11 +46,11 @@ use crate::{
};
use super::{
- coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges, Expectation,
- InferenceContext, InferenceDiagnostic, TypeMismatch,
+ cast::CastCheck, coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges,
+ Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
};
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
let ty = self.infer_expr_inner(tgt_expr, expected);
if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
@@ -198,19 +198,6 @@ impl<'a> InferenceContext<'a> {
None => self.result.standard_types.never.clone(),
}
}
- &Expr::While { condition, body, label } => {
- self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
- this.infer_expr(
- condition,
- &Expectation::HasType(this.result.standard_types.bool_.clone()),
- );
- this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
- });
-
- // the body may not run, so it diverging doesn't mean we diverge
- self.diverges = Diverges::Maybe;
- TyBuilder::unit()
- }
Expr::Closure { body, args, ret_type, arg_types, closure_kind, capture_by: _ } => {
assert_eq!(args.len(), arg_types.len());
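The `Expr::While` arms disappear from inference, capture analysis and mutability analysis in this patch; presumably `while` loops are now desugared during body lowering, roughly as in the sketch below, so these passes only ever see `loop`, `if` and `break` (hypothetical illustration, not from the patch):

    fn count_down(mut n: u32) {
        // source form: while n > 0 { n -= 1; }
        // desugared form the later passes would see:
        loop {
            if n > 0 {
                n -= 1;
            } else {
                break;
            }
        }
    }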
@@ -316,7 +303,7 @@ impl<'a> InferenceContext<'a> {
}
Expr::Call { callee, args, .. } => {
let callee_ty = self.infer_expr(*callee, &Expectation::none());
- let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
+ let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false);
let (res, derefed_callee) = 'b: {
// manual loop to be able to access `derefs.table`
while let Some((callee_deref_ty, _)) = derefs.next() {
@@ -574,16 +561,8 @@ impl<'a> InferenceContext<'a> {
}
Expr::Cast { expr, type_ref } => {
let cast_ty = self.make_ty(type_ref);
- // FIXME: propagate the "castable to" expectation
- let inner_ty = self.infer_expr_no_expect(*expr);
- match (inner_ty.kind(Interner), cast_ty.kind(Interner)) {
- (TyKind::Ref(_, _, inner), TyKind::Raw(_, cast)) => {
- // FIXME: record invalid cast diagnostic in case of mismatch
- self.unify(inner, cast);
- }
- // FIXME check the other kinds of cast...
- _ => (),
- }
+ let expr_ty = self.infer_expr(*expr, &Expectation::Castable(cast_ty.clone()));
+ self.deferred_cast_checks.push(CastCheck::new(expr_ty, cast_ty.clone()));
cast_ty
}
Expr::Ref { expr, rawness, mutability } => {
@@ -928,7 +907,7 @@ impl<'a> InferenceContext<'a> {
if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) {
if adjustments
.last()
- .map(|x| matches!(x.kind, Adjust::Borrow(_)))
+ .map(|it| matches!(it.kind, Adjust::Borrow(_)))
.unwrap_or(true)
{
// prefer reborrow to move
@@ -1385,7 +1364,7 @@ impl<'a> InferenceContext<'a> {
receiver_ty: &Ty,
name: &Name,
) -> Option<(Ty, Option<FieldId>, Vec<Adjustment>, bool)> {
- let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone());
+ let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false);
let mut private_field = None;
let res = autoderef.by_ref().find_map(|(derefed_ty, _)| {
let (field_id, parameters) = match derefed_ty.kind(Interner) {
@@ -1449,6 +1428,13 @@ impl<'a> InferenceContext<'a> {
fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
+
+ if name.is_missing() {
+ // Bail out early, don't even try to look up field. Also, we don't issue an unresolved
+ // field diagnostic because this is a syntax error rather than a semantic error.
+ return self.err_ty();
+ }
+
match self.lookup_field(&receiver_ty, name) {
Some((ty, field_id, adjustments, is_public)) => {
self.write_expr_adj(receiver, adjustments);
@@ -1585,7 +1571,7 @@ impl<'a> InferenceContext<'a> {
output: Ty,
inputs: Vec<Ty>,
) -> Vec<Ty> {
- if let Some(expected_ty) = expected_output.to_option(&mut self.table) {
+ if let Some(expected_ty) = expected_output.only_has_type(&mut self.table) {
self.table.fudge_inference(|table| {
if table.try_unify(&expected_ty, &output).is_ok() {
table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
@@ -1658,6 +1644,7 @@ impl<'a> InferenceContext<'a> {
// the parameter to coerce to the expected type (for example in
// `coerce_unsize_expected_type_4`).
let param_ty = self.normalize_associated_types_in(param_ty);
+ let expected_ty = self.normalize_associated_types_in(expected_ty);
let expected = Expectation::rvalue_hint(self, expected_ty);
// infer with the expected type we have...
let ty = self.infer_expr_inner(arg, &expected);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
index 46f2e1d7d..396ca0044 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs
@@ -12,7 +12,7 @@ use crate::{lower::lower_to_chalk_mutability, Adjust, Adjustment, AutoBorrow, Ov
use super::InferenceContext;
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
pub(crate) fn infer_mut_body(&mut self) {
self.infer_mut_expr(self.body.body_expr, Mutability::Not);
}
@@ -69,16 +69,12 @@ impl<'a> InferenceContext<'a> {
self.infer_mut_expr(*tail, Mutability::Not);
}
}
- &Expr::While { condition: c, body, label: _ } => {
- self.infer_mut_expr(c, Mutability::Not);
- self.infer_mut_expr(body, Mutability::Not);
- }
- Expr::MethodCall { receiver: x, method_name: _, args, generic_args: _ }
- | Expr::Call { callee: x, args, is_assignee_expr: _ } => {
- self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*x)));
+ Expr::MethodCall { receiver: it, method_name: _, args, generic_args: _ }
+ | Expr::Call { callee: it, args, is_assignee_expr: _ } => {
+ self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*it)));
}
Expr::Match { expr, arms } => {
- let m = self.pat_iter_bound_mutability(arms.iter().map(|x| x.pat));
+ let m = self.pat_iter_bound_mutability(arms.iter().map(|it| it.pat));
self.infer_mut_expr(*expr, m);
for arm in arms.iter() {
self.infer_mut_expr(arm.expr, Mutability::Not);
@@ -96,7 +92,7 @@ impl<'a> InferenceContext<'a> {
}
}
Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => {
- self.infer_mut_not_expr_iter(fields.iter().map(|x| x.expr).chain(*spread))
+ self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
}
&Expr::Index { base, index } => {
if mutability == Mutability::Mut {
@@ -204,8 +200,8 @@ impl<'a> InferenceContext<'a> {
}
/// Checks if the pat contains a `ref mut` binding. Such paths makes the context of bounded expressions
- /// mutable. For example in `let (ref mut x0, ref x1) = *x;` we need to use `DerefMut` for `*x` but in
- /// `let (ref x0, ref x1) = *x;` we should use `Deref`.
+ /// mutable. For example in `let (ref mut x0, ref x1) = *it;` we need to use `DerefMut` for `*it` but in
+ /// `let (ref x0, ref x1) = *it;` we should use `Deref`.
fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
let mut r = Mutability::Not;
self.body.walk_bindings_in_pat(pat, |b| {
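A small illustration (not from the patch) of the distinction the updated doc comment describes: a `ref mut` binding forces the scrutinee place to be accessed mutably, while `ref`-only patterns need just shared access.

    fn bindings(mut b: Box<(i32, i32)>) {
        // `ref mut x0` means `*b` is used as a mutable place (DerefMut-flavoured access)
        let (ref mut x0, ref x1) = *b;
        *x0 += *x1;
        // a pattern with only `ref` bindings needs shared access to `*b` (Deref)
        let (ref y0, ref y1) = *b;
        let _ = (y0, y1);
    }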
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
index 2480f8bab..5da0ab76b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -56,7 +56,7 @@ impl PatLike for PatId {
}
}
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
/// Infers type for tuple struct pattern or its corresponding assignee expression.
///
/// Ellipses found in the original pattern or expression must be filtered out.
@@ -306,7 +306,7 @@ impl<'a> InferenceContext<'a> {
self.result
.pat_adjustments
.get(&pat)
- .and_then(|x| x.first())
+ .and_then(|it| it.first())
.unwrap_or(&self.result.type_of_pat[pat])
.clone()
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index e33d8f179..0fb71135b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -22,7 +22,7 @@ use crate::{
TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
};
-impl<'a> InferenceContext<'a> {
+impl InferenceContext<'_> {
pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
&mut self,
t: T,
@@ -91,7 +91,7 @@ pub(crate) fn unify(
let mut table = InferenceTable::new(db, env);
let vars = Substitution::from_iter(
Interner,
- tys.binders.iter(Interner).map(|x| match &x.kind {
+ tys.binders.iter(Interner).map(|it| match &it.kind {
chalk_ir::VariableKind::Ty(_) => {
GenericArgData::Ty(table.new_type_var()).intern(Interner)
}
@@ -252,7 +252,8 @@ impl<'a> InferenceTable<'a> {
// and registering an obligation. But it needs chalk support, so we handle the most basic
// case (a non associated const without generic parameters) manually.
if subst.len(Interner) == 0 {
- if let Ok(eval) = self.db.const_eval((*c_id).into(), subst.clone())
+ if let Ok(eval) =
+ self.db.const_eval((*c_id).into(), subst.clone(), None)
{
eval
} else {
@@ -547,7 +548,7 @@ impl<'a> InferenceTable<'a> {
table: &'a mut InferenceTable<'b>,
highest_known_var: InferenceVar,
}
- impl<'a, 'b> TypeFolder<Interner> for VarFudger<'a, 'b> {
+ impl TypeFolder<Interner> for VarFudger<'_, '_> {
fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
self
}
@@ -686,8 +687,8 @@ impl<'a> InferenceTable<'a> {
let mut arg_tys = vec![];
let arg_ty = TyBuilder::tuple(num_args)
- .fill(|x| {
- let arg = match x {
+ .fill(|it| {
+ let arg = match it {
ParamKind::Type => self.new_type_var(),
ParamKind::Const(ty) => {
never!("Tuple with const parameter");
@@ -753,7 +754,7 @@ impl<'a> InferenceTable<'a> {
{
fold_tys_and_consts(
ty,
- |x, _| match x {
+ |it, _| match it {
Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)),
Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)),
},
@@ -785,7 +786,7 @@ impl<'a> InferenceTable<'a> {
crate::ConstScalar::Unknown => self.new_const_var(data.ty.clone()),
// try to evaluate unevaluated const. Replace with new var if const eval failed.
crate::ConstScalar::UnevaluatedConst(id, subst) => {
- if let Ok(eval) = self.db.const_eval(*id, subst.clone()) {
+ if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) {
eval
} else {
self.new_const_var(data.ty.clone())
@@ -798,7 +799,7 @@ impl<'a> InferenceTable<'a> {
}
}
-impl<'a> fmt::Debug for InferenceTable<'a> {
+impl fmt::Debug for InferenceTable<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish()
}
@@ -826,7 +827,7 @@ mod resolve {
pub(super) var_stack: &'a mut Vec<InferenceVar>,
pub(super) fallback: F,
}
- impl<'a, 'b, F> TypeFolder<Interner> for Resolver<'a, 'b, F>
+ impl<F> TypeFolder<Interner> for Resolver<'_, '_, F>
where
F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg,
{
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index 35d3407c1..b15339d44 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -1,13 +1,12 @@
//! Compute the binary representation of a type
-use base_db::CrateId;
use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
use hir_def::{
layout::{
Abi, FieldsShape, Integer, LayoutCalculator, LayoutS, Primitive, ReprOptions, Scalar, Size,
StructKind, TargetDataLayout, WrappingRange,
},
- LocalEnumVariantId, LocalFieldId,
+ LocalEnumVariantId, LocalFieldId, StructId,
};
use la_arena::{Idx, RawIdx};
use stdx::never;
@@ -15,7 +14,7 @@ use triomphe::Arc;
use crate::{
consteval::try_const_usize, db::HirDatabase, infer::normalize, layout::adt::struct_variant_idx,
- utils::ClosureSubst, Interner, Substitution, TraitEnvironment, Ty,
+ utils::ClosureSubst, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
};
pub use self::{
@@ -24,8 +23,8 @@ pub use self::{
};
macro_rules! user_error {
- ($x: expr) => {
- return Err(LayoutError::UserError(format!($x)))
+ ($it: expr) => {
+ return Err(LayoutError::UserError(format!($it)))
};
}
@@ -61,7 +60,6 @@ pub enum LayoutError {
}
struct LayoutCx<'a> {
- krate: CrateId,
target: &'a TargetDataLayout,
}
@@ -77,18 +75,101 @@ impl<'a> LayoutCalculator for LayoutCx<'a> {
}
}
+// FIXME: move this to the `rustc_abi`.
+fn layout_of_simd_ty(
+ db: &dyn HirDatabase,
+ id: StructId,
+ subst: &Substitution,
+ env: Arc<TraitEnvironment>,
+ dl: &TargetDataLayout,
+) -> Result<Arc<Layout>, LayoutError> {
+ let fields = db.field_types(id.into());
+
+ // Supported SIMD vectors are homogeneous ADTs with at least one field:
+ //
+ // * #[repr(simd)] struct S(T, T, T, T);
+ // * #[repr(simd)] struct S { it: T, y: T, z: T, w: T }
+ // * #[repr(simd)] struct S([T; 4])
+ //
+ // where T is a primitive scalar (integer/float/pointer).
+
+ let f0_ty = match fields.iter().next() {
+ Some(it) => it.1.clone().substitute(Interner, subst),
+ None => {
+ user_error!("simd type with zero fields");
+ }
+ };
+
+ // The element type and number of elements of the SIMD vector
+ // are obtained from:
+ //
+ // * the element type and length of the single array field, if
+ // the first field is of array type, or
+ //
+ // * the homogeneous field type and the number of fields.
+ let (e_ty, e_len, is_array) = if let TyKind::Array(e_ty, _) = f0_ty.kind(Interner) {
+ // Extract the number of elements from the layout of the array field:
+ let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), env.clone())?.fields else {
+ user_error!("Array with non array layout");
+ };
+
+ (e_ty.clone(), count, true)
+ } else {
+ // First ADT field is not an array:
+ (f0_ty, fields.iter().count() as u64, false)
+ };
+
+ // Compute the ABI of the element type:
+ let e_ly = db.layout_of_ty(e_ty, env.clone())?;
+ let Abi::Scalar(e_abi) = e_ly.abi else {
+ user_error!("simd type with inner non scalar type");
+ };
+
+ // Compute the size and alignment of the vector:
+ let size = e_ly.size.checked_mul(e_len, dl).ok_or(LayoutError::SizeOverflow)?;
+ let align = dl.vector_align(size);
+ let size = size.align_to(align.abi);
+
+ // Compute the placement of the vector fields:
+ let fields = if is_array {
+ FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() }
+ } else {
+ FieldsShape::Array { stride: e_ly.size, count: e_len }
+ };
+
+ Ok(Arc::new(Layout {
+ variants: Variants::Single { index: struct_variant_idx() },
+ fields,
+ abi: Abi::Vector { element: e_abi, count: e_len },
+ largest_niche: e_ly.largest_niche,
+ size,
+ align,
+ }))
+}
+
pub fn layout_of_ty_query(
db: &dyn HirDatabase,
ty: Ty,
- krate: CrateId,
+ trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
- let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
- let cx = LayoutCx { krate, target: &target };
+ let krate = trait_env.krate;
+ let Some(target) = db.target_data_layout(krate) else {
+ return Err(LayoutError::TargetLayoutNotAvailable);
+ };
+ let cx = LayoutCx { target: &target };
let dl = &*cx.current_data_layout();
- let trait_env = Arc::new(TraitEnvironment::empty(krate));
- let ty = normalize(db, trait_env, ty.clone());
+ let ty = normalize(db, trait_env.clone(), ty.clone());
let result = match ty.kind(Interner) {
- TyKind::Adt(AdtId(def), subst) => return db.layout_of_adt(*def, subst.clone(), krate),
+ TyKind::Adt(AdtId(def), subst) => {
+ if let hir_def::AdtId::StructId(s) = def {
+ let data = db.struct_data(*s);
+ let repr = data.repr.unwrap_or_default();
+ if repr.simd() {
+ return layout_of_simd_ty(db, *s, subst, trait_env.clone(), &target);
+ }
+ };
+ return db.layout_of_adt(*def, subst.clone(), trait_env.clone());
+ }
TyKind::Scalar(s) => match s {
chalk_ir::Scalar::Bool => Layout::scalar(
dl,
@@ -145,9 +226,9 @@ pub fn layout_of_ty_query(
let fields = tys
.iter(Interner)
- .map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), krate))
+ .map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()?;
- let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
+ let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
@@ -155,7 +236,7 @@ pub fn layout_of_ty_query(
let count = try_const_usize(db, &count).ok_or(LayoutError::UserError(
"unevaluated or mistyped const generic parameter".to_string(),
))? as u64;
- let element = db.layout_of_ty(element.clone(), krate)?;
+ let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
@@ -176,7 +257,7 @@ pub fn layout_of_ty_query(
}
}
TyKind::Slice(element) => {
- let element = db.layout_of_ty(element.clone(), krate)?;
+ let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: element.size, count: 0 },
@@ -198,7 +279,15 @@ pub fn layout_of_ty_query(
// return Ok(tcx.mk_layout(LayoutS::scalar(cx, data_ptr)));
// }
- let unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone());
+ let mut unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone());
+ if let TyKind::AssociatedType(id, subst) = unsized_part.kind(Interner) {
+ unsized_part = TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
+ associated_ty_id: *id,
+ substitution: subst.clone(),
+ }))
+ .intern(Interner);
+ }
+ unsized_part = normalize(db, trait_env.clone(), unsized_part);
let metadata = match unsized_part.kind(Interner) {
TyKind::Slice(_) | TyKind::Str => {
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
@@ -252,7 +341,7 @@ pub fn layout_of_ty_query(
match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = db.infer(func.into());
- return db.layout_of_ty(infer.type_of_rpit[idx].clone(), krate);
+ return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env.clone());
}
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
return Err(LayoutError::NotImplemented)
@@ -265,14 +354,14 @@ pub fn layout_of_ty_query(
let (captures, _) = infer.closure_info(c);
let fields = captures
.iter()
- .map(|x| {
+ .map(|it| {
db.layout_of_ty(
- x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
- krate,
+ it.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()),
+ trait_env.clone(),
)
})
.collect::<Result<Vec<_>, _>>()?;
- let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>();
+ let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
.ok_or(LayoutError::Unknown)?
@@ -281,8 +370,16 @@ pub fn layout_of_ty_query(
return Err(LayoutError::NotImplemented)
}
TyKind::Error => return Err(LayoutError::HasErrorType),
- TyKind::AssociatedType(_, _)
- | TyKind::Alias(_)
+ TyKind::AssociatedType(id, subst) => {
+ // Try again with `TyKind::Alias` to normalize the associated type.
+ let ty = TyKind::Alias(chalk_ir::AliasTy::Projection(ProjectionTy {
+ associated_ty_id: *id,
+ substitution: subst.clone(),
+ }))
+ .intern(Interner);
+ return db.layout_of_ty(ty, trait_env);
+ }
+ TyKind::Alias(_)
| TyKind::Placeholder(_)
| TyKind::BoundVar(_)
| TyKind::InferenceVar(_, _) => return Err(LayoutError::HasPlaceholder),
@@ -294,7 +391,7 @@ pub fn layout_of_ty_recover(
_: &dyn HirDatabase,
_: &[String],
_: &Ty,
- _: &CrateId,
+ _: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type");
}
@@ -315,7 +412,10 @@ fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty {
let data = db.struct_data(*i);
let mut it = data.variant_data.fields().iter().rev();
match it.next() {
- Some((f, _)) => field_ty(db, (*i).into(), f, subst),
+ Some((f, _)) => {
+ let last_field_ty = field_ty(db, (*i).into(), f, subst);
+ struct_tail_erasing_lifetimes(db, last_field_ty)
+ }
None => pointee,
}
}
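The recursion added to struct_tail_erasing_lifetimes matters for fat-pointer metadata: the unsized tail can be buried several levels deep in last fields. A minimal example (illustrative, similar in spirit to the unsized_ref test added later in this patch):

    struct S1([u8]);
    struct S2(u32, S1);

    fn fat_pointer_size() {
        // &S2 needs slice-length metadata because S2's tail is S1, whose tail is [u8]
        assert_eq!(core::mem::size_of::<&S2>(), 2 * core::mem::size_of::<usize>());
    }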
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index bd2752a71..1c92e80f3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -2,7 +2,6 @@
use std::{cmp, ops::Bound};
-use base_db::CrateId;
use hir_def::{
data::adt::VariantData,
layout::{Integer, LayoutCalculator, ReprOptions, TargetDataLayout},
@@ -16,7 +15,7 @@ use crate::{
db::HirDatabase,
lang_items::is_unsafe_cell,
layout::{field_ty, Layout, LayoutError, RustcEnumVariantIdx},
- Substitution,
+ Substitution, TraitEnvironment,
};
use super::LayoutCx;
@@ -29,15 +28,18 @@ pub fn layout_of_adt_query(
db: &dyn HirDatabase,
def: AdtId,
subst: Substitution,
- krate: CrateId,
+ trait_env: Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
- let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
- let cx = LayoutCx { krate, target: &target };
+ let krate = trait_env.krate;
+ let Some(target) = db.target_data_layout(krate) else {
+ return Err(LayoutError::TargetLayoutNotAvailable);
+ };
+ let cx = LayoutCx { target: &target };
let dl = cx.current_data_layout();
let handle_variant = |def: VariantId, var: &VariantData| {
var.fields()
.iter()
- .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), cx.krate))
+ .map(|(fd, _)| db.layout_of_ty(field_ty(db, def, fd, &subst), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()
};
let (variants, repr) = match def {
@@ -70,9 +72,9 @@ pub fn layout_of_adt_query(
};
let variants = variants
.iter()
- .map(|x| x.iter().map(|x| &**x).collect::<Vec<_>>())
+ .map(|it| it.iter().map(|it| &**it).collect::<Vec<_>>())
.collect::<SmallVec<[_; 1]>>();
- let variants = variants.iter().map(|x| x.iter().collect()).collect();
+ let variants = variants.iter().map(|it| it.iter().collect()).collect();
let result = if matches!(def, AdtId::UnionId(..)) {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
} else {
@@ -103,7 +105,7 @@ pub fn layout_of_adt_query(
&& variants
.iter()
.next()
- .and_then(|x| x.last().map(|x| x.is_unsized()))
+ .and_then(|it| it.last().map(|it| !it.is_unsized()))
.unwrap_or(true),
)
.ok_or(LayoutError::SizeOverflow)?
@@ -116,9 +118,9 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
let get = |name| {
let attr = attrs.by_key(name).tt_values();
for tree in attr {
- if let Some(x) = tree.token_trees.first() {
- if let Ok(x) = x.to_string().parse() {
- return Bound::Included(x);
+ if let Some(it) = tree.token_trees.first() {
+ if let Ok(it) = it.to_string().parse() {
+ return Bound::Included(it);
}
}
}
@@ -132,7 +134,7 @@ pub fn layout_of_adt_recover(
_: &[String],
_: &AdtId,
_: &Substitution,
- _: &CrateId,
+ _: &Arc<TraitEnvironment>,
) -> Result<Arc<Layout>, LayoutError> {
user_error!("infinite sized recursive type");
}
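Threading an Arc<TraitEnvironment> through layout_of_ty and layout_of_adt (instead of a bare CrateId) is what lets the new TyKind::AssociatedType handling in layout.rs normalize projections. A sketch of the kind of type this enables (illustrative, not from the patch):

    trait Tr {
        type Assoc;
    }
    impl Tr for () {
        type Assoc = u64;
    }
    // computing this layout requires normalizing `<() as Tr>::Assoc` to u64 first
    struct Goal(<() as Tr>::Assoc);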
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index 0ff8c532d..333ad473a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -26,7 +26,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
);
let (db, file_ids) = TestDB::with_many_files(&ra_fixture);
- let (adt_or_type_alias_id, module_id) = file_ids
+ let adt_or_type_alias_id = file_ids
.into_iter()
.find_map(|file_id| {
let module_id = db.module_for_file(file_id);
@@ -47,7 +47,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
}
_ => None,
})?;
- Some((adt_or_type_alias_id, module_id))
+ Some(adt_or_type_alias_id)
})
.unwrap();
let goal_ty = match adt_or_type_alias_id {
@@ -58,7 +58,13 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
db.ty(ty_id.into()).substitute(Interner, &Substitution::empty(Interner))
}
};
- db.layout_of_ty(goal_ty, module_id.krate())
+ db.layout_of_ty(
+ goal_ty,
+ db.trait_environment(match adt_or_type_alias_id {
+ Either::Left(adt) => hir_def::GenericDefId::AdtId(adt),
+ Either::Right(ty) => hir_def::GenericDefId::TypeAliasId(ty),
+ }),
+ )
}
/// A version of `eval_goal` for types that can not be expressed in ADTs, like closures and `impl Trait`
@@ -72,7 +78,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(&db);
let scope = &def_map[module_id.local_id].scope;
- let adt_id = scope
+ let function_id = scope
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
@@ -82,11 +88,11 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
_ => None,
})
.unwrap();
- let hir_body = db.body(adt_id.into());
+ let hir_body = db.body(function_id.into());
let b = hir_body.bindings.iter().find(|x| x.1.name.to_smol_str() == "goal").unwrap().0;
- let infer = db.infer(adt_id.into());
+ let infer = db.infer(function_id.into());
let goal_ty = infer.type_of_binding[b].clone();
- db.layout_of_ty(goal_ty, module_id.krate())
+ db.layout_of_ty(goal_ty, db.trait_environment(function_id.into()))
}
#[track_caller]
@@ -271,6 +277,20 @@ struct Goal(Foo<S>);
}
#[test]
+fn simd_types() {
+ check_size_and_align(
+ r#"
+ #[repr(simd)]
+ struct SimdType(i64, i64);
+ struct Goal(SimdType);
+ "#,
+ "",
+ 16,
+ 16,
+ );
+}
+
+#[test]
fn return_position_impl_trait() {
size_and_align_expr! {
trait T {}
@@ -344,6 +364,24 @@ fn return_position_impl_trait() {
}
#[test]
+fn unsized_ref() {
+ size_and_align! {
+ struct S1([u8]);
+ struct S2(S1);
+ struct S3(i32, str);
+ struct S4(u64, S3);
+ #[allow(dead_code)]
+ struct S5 {
+ field1: u8,
+ field2: i16,
+ field_last: S4,
+ }
+
+ struct Goal(&'static S1, &'static S2, &'static S3, &'static S4, &'static S5);
+ }
+}
+
+#[test]
fn enums() {
size_and_align! {
enum Goal {
@@ -369,11 +407,11 @@ fn tuple() {
}
#[test]
-fn non_zero() {
+fn non_zero_and_non_null() {
size_and_align! {
- minicore: non_zero, option;
- use core::num::NonZeroU8;
- struct Goal(Option<NonZeroU8>);
+ minicore: non_zero, non_null, option;
+ use core::{num::NonZeroU8, ptr::NonNull};
+ struct Goal(Option<NonZeroU8>, Option<NonNull<i32>>);
}
}
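The renamed test relies on the niche optimization for both NonZeroU8 and NonNull: the forbidden zero value encodes None, so the Option wrapper adds no size. For reference (illustrative, not part of the patch):

    use core::{mem::size_of, num::NonZeroU8, ptr::NonNull};

    const _: () = assert!(size_of::<Option<NonNull<i32>>>() == size_of::<*mut i32>());
    const _: () = assert!(size_of::<Option<NonZeroU8>>() == size_of::<u8>());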
@@ -432,3 +470,41 @@ fn enums_with_discriminants() {
}
}
}
+
+#[test]
+fn core_mem_discriminant() {
+ size_and_align! {
+ minicore: discriminant;
+ struct S(i32, u64);
+ struct Goal(core::mem::Discriminant<S>);
+ }
+ size_and_align! {
+ minicore: discriminant;
+ #[repr(u32)]
+ enum S {
+ A,
+ B,
+ C,
+ }
+ struct Goal(core::mem::Discriminant<S>);
+ }
+ size_and_align! {
+ minicore: discriminant;
+ enum S {
+ A(i32),
+ B(i64),
+ C(u8),
+ }
+ struct Goal(core::mem::Discriminant<S>);
+ }
+ size_and_align! {
+ minicore: discriminant;
+ #[repr(C, u16)]
+ enum S {
+ A(i32),
+ B(i64) = 200,
+ C = 1000,
+ }
+ struct Goal(core::mem::Discriminant<S>);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 1a4d003bf..b3ca2a222 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -180,9 +180,16 @@ impl MemoryMap {
/// allocator function as `f` and it will return a mapping of old addresses to new addresses.
fn transform_addresses(
&self,
- mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>,
+ mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>,
) -> Result<HashMap<usize, usize>, MirEvalError> {
- self.memory.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
+ self.memory
+ .iter()
+ .map(|x| {
+ let addr = *x.0;
+ let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
+ Ok((addr, f(x.1, align)?))
+ })
+ .collect()
}
fn get<'a>(&'a self, addr: usize, size: usize) -> Option<&'a [u8]> {
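The new alignment argument passed to the relocation callback is derived from the old address itself: `addr & (addr - 1)` clears the lowest set bit, so `addr - (addr & (addr - 1))` is the largest power of two dividing `addr`, capped at 64. A standalone sketch (not from the patch):

    fn inferred_align(addr: usize) -> usize {
        if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }
    }

    fn demo() {
        assert_eq!(inferred_align(24), 8);    // 24 = 0b11000, lowest set bit is 8
        assert_eq!(inferred_align(4096), 64); // capped at 64
    }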
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 9951a1c75..2837f400b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -23,7 +23,7 @@ use hir_def::{
generics::{
TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
},
- lang_item::{lang_attr, LangItem},
+ lang_item::LangItem,
nameres::MacroSubNs,
path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs},
@@ -959,10 +959,10 @@ impl<'a> TyLoweringContext<'a> {
}
pub(crate) fn lower_where_predicate(
- &'a self,
- where_predicate: &'a WherePredicate,
+ &self,
+ where_predicate: &WherePredicate,
ignore_bindings: bool,
- ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ ) -> impl Iterator<Item = QuantifiedWhereClause> {
match where_predicate {
WherePredicate::ForLifetime { target, bound, .. }
| WherePredicate::TypeBound { target, bound } => {
@@ -1012,7 +1012,7 @@ impl<'a> TyLoweringContext<'a> {
// (So ideally, we'd only ignore `~const Drop` here)
// - `Destruct` impls are built-in in 1.62 (current nightly as of 08-04-2022), so until
// the builtin impls are supported by Chalk, we ignore them here.
- if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
+ if let Some(lang) = self.db.lang_attr(tr.hir_trait_id().into()) {
if matches!(lang, LangItem::Drop | LangItem::Destruct) {
return false;
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index ab6430e8f..f3a5f69b2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -534,7 +534,7 @@ impl ReceiverAdjustments {
let mut ty = table.resolve_ty_shallow(&ty);
let mut adjust = Vec::new();
for _ in 0..self.autoderefs {
- match autoderef::autoderef_step(table, ty.clone()) {
+ match autoderef::autoderef_step(table, ty.clone(), true) {
None => {
never!("autoderef not possible for {:?}", ty);
ty = TyKind::Error.intern(Interner);
@@ -559,10 +559,10 @@ impl ReceiverAdjustments {
adjust.push(a);
}
if self.unsize_array {
- ty = 'x: {
+ ty = 'it: {
if let TyKind::Ref(m, l, inner) = ty.kind(Interner) {
if let TyKind::Array(inner, _) = inner.kind(Interner) {
- break 'x TyKind::Ref(
+ break 'it TyKind::Ref(
m.clone(),
l.clone(),
TyKind::Slice(inner.clone()).intern(Interner),
@@ -665,13 +665,21 @@ pub fn is_dyn_method(
};
let self_ty = trait_ref.self_type_parameter(Interner);
if let TyKind::Dyn(d) = self_ty.kind(Interner) {
- let is_my_trait_in_bounds =
- d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() {
- // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter
- // what the generics are, we are sure that the method is come from the vtable.
- WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id,
- _ => false,
- });
+ let is_my_trait_in_bounds = d
+ .bounds
+ .skip_binders()
+ .as_slice(Interner)
+ .iter()
+ .map(|it| it.skip_binders())
+ .flat_map(|it| match it {
+ WhereClause::Implemented(tr) => {
+ all_super_traits(db.upcast(), from_chalk_trait_id(tr.trait_id))
+ }
+ _ => smallvec![],
+ })
+ // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter
+ // what the generics are, we can be sure that the method comes from the vtable.
+ .any(|x| x == trait_id);
if is_my_trait_in_bounds {
return Some(fn_params);
}
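Walking all_super_traits of each bound means a method defined on a supertrait is now recognized as a vtable call when invoked through a sub-trait object. Roughly the situation this covers (illustrative, not taken from the patch):

    trait Super {
        fn method(&self) -> u32;
    }
    trait Sub: Super {}

    fn call(obj: &dyn Sub) -> u32 {
        // dispatched through the vtable even though `Super` only appears in
        // `dyn Sub`'s bounds via the supertrait chain
        obj.method()
    }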
@@ -682,14 +690,14 @@ pub fn is_dyn_method(
/// Looks up the impl method that actually runs for the trait method `func`.
///
/// Returns `func` if it's not a method defined in a trait or the lookup failed.
-pub fn lookup_impl_method(
+pub(crate) fn lookup_impl_method_query(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
func: FunctionId,
fn_subst: Substitution,
) -> (FunctionId, Substitution) {
let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
- return (func, fn_subst)
+ return (func, fn_subst);
};
let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
let fn_params = fn_subst.len(Interner) - trait_params;
@@ -699,8 +707,8 @@ pub fn lookup_impl_method(
};
let name = &db.function_data(func).name;
- let Some((impl_fn, impl_subst)) = lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
- .and_then(|assoc| {
+ let Some((impl_fn, impl_subst)) =
+ lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name).and_then(|assoc| {
if let (AssocItemId::FunctionId(id), subst) = assoc {
Some((id, subst))
} else {
@@ -731,7 +739,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
let impls = db.trait_impls_in_deps(env.krate);
let self_impls = match self_ty.kind(Interner) {
TyKind::Adt(id, _) => {
- id.0.module(db.upcast()).containing_block().map(|x| db.trait_impls_in_block(x))
+ id.0.module(db.upcast()).containing_block().map(|it| db.trait_impls_in_block(it))
}
_ => None,
};
@@ -895,8 +903,8 @@ pub fn iterate_method_candidates_dyn(
// (just as rustc does an autoderef and then autoref again).
// We have to be careful about the order we're looking at candidates
- // in here. Consider the case where we're resolving `x.clone()`
- // where `x: &Vec<_>`. This resolves to the clone method with self
+ // in here. Consider the case where we're resolving `it.clone()`
+ // where `it: &Vec<_>`. This resolves to the clone method with self
// type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
// the receiver type exactly matches before cases where we have to
// do autoref. But in the autoderef steps, the `&_` self type comes
@@ -1012,8 +1020,8 @@ fn iterate_method_candidates_by_receiver(
let snapshot = table.snapshot();
// We're looking for methods with *receiver* type receiver_ty. These could
// be found in any of the derefs of receiver_ty, so we have to go through
- // that.
- let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+ // that, including raw derefs.
+ let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true);
while let Some((self_ty, _)) = autoderef.next() {
iterate_inherent_methods(
&self_ty,
@@ -1028,7 +1036,7 @@ fn iterate_method_candidates_by_receiver(
table.rollback_to(snapshot);
- let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+ let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true);
while let Some((self_ty, _)) = autoderef.next() {
iterate_trait_method_candidates(
&self_ty,
@@ -1480,8 +1488,8 @@ fn generic_implements_goal(
.push(self_ty.value.clone())
.fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
.build();
- kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
- let vk = match x.data(Interner) {
+ kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|it| {
+ let vk = match it.data(Interner) {
chalk_ir::GenericArgData::Ty(_) => {
chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
}
@@ -1504,7 +1512,7 @@ fn autoderef_method_receiver(
ty: Ty,
) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> {
let mut deref_chain: Vec<_> = Vec::new();
- let mut autoderef = autoderef::Autoderef::new(table, ty);
+ let mut autoderef = autoderef::Autoderef::new(table, ty, false);
while let Some((ty, derefs)) = autoderef.next() {
deref_chain.push((
autoderef.table.canonicalize(ty).value,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
index 2345bab0b..4723c25ed 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -3,9 +3,14 @@
use std::{fmt::Display, iter};
use crate::{
- consteval::usize_const, db::HirDatabase, display::HirDisplay, infer::PointerCast,
- lang_items::is_box, mapping::ToChalk, CallableDefId, ClosureId, Const, ConstScalar,
- InferenceResult, Interner, MemoryMap, Substitution, Ty, TyKind,
+ consteval::usize_const,
+ db::HirDatabase,
+ display::HirDisplay,
+ infer::{normalize, PointerCast},
+ lang_items::is_box,
+ mapping::ToChalk,
+ CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
+ Substitution, TraitEnvironment, Ty, TyKind,
};
use base_db::CrateId;
use chalk_ir::Mutability;
@@ -22,7 +27,9 @@ mod pretty;
mod monomorphization;
pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
-pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError, VTableMap};
+pub use eval::{
+ interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap,
+};
pub use lower::{
lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
};
@@ -32,6 +39,7 @@ pub use monomorphization::{
};
use smallvec::{smallvec, SmallVec};
use stdx::{impl_from, never};
+use triomphe::Arc;
use super::consteval::{intern_const_scalar, try_const_usize};
@@ -129,13 +137,21 @@ pub enum ProjectionElem<V, T> {
impl<V, T> ProjectionElem<V, T> {
pub fn projected_ty(
&self,
- base: Ty,
+ mut base: Ty,
db: &dyn HirDatabase,
closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
krate: CrateId,
) -> Ty {
+ if matches!(base.kind(Interner), TyKind::Alias(_) | TyKind::AssociatedType(..)) {
+ base = normalize(
+ db,
+ // FIXME: we should get this from caller
+ Arc::new(TraitEnvironment::empty(krate)),
+ base,
+ );
+ }
match self {
- ProjectionElem::Deref => match &base.data(Interner).kind {
+ ProjectionElem::Deref => match &base.kind(Interner) {
TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(),
TyKind::Adt(adt, subst) if is_box(db, adt.0) => {
subst.at(Interner, 0).assert_ty_ref(Interner).clone()
@@ -145,7 +161,7 @@ impl<V, T> ProjectionElem<V, T> {
return TyKind::Error.intern(Interner);
}
},
- ProjectionElem::Field(f) => match &base.data(Interner).kind {
+ ProjectionElem::Field(f) => match &base.kind(Interner) {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
}
@@ -154,7 +170,7 @@ impl<V, T> ProjectionElem<V, T> {
return TyKind::Error.intern(Interner);
}
},
- ProjectionElem::TupleOrClosureField(f) => match &base.data(Interner).kind {
+ ProjectionElem::TupleOrClosureField(f) => match &base.kind(Interner) {
TyKind::Tuple(_, subst) => subst
.as_slice(Interner)
.get(*f)
@@ -171,7 +187,7 @@ impl<V, T> ProjectionElem<V, T> {
}
},
ProjectionElem::ConstantIndex { .. } | ProjectionElem::Index(_) => {
- match &base.data(Interner).kind {
+ match &base.kind(Interner) {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => {
never!("Overloaded index is not a projection");
@@ -179,7 +195,7 @@ impl<V, T> ProjectionElem<V, T> {
}
}
}
- &ProjectionElem::Subslice { from, to } => match &base.data(Interner).kind {
+ &ProjectionElem::Subslice { from, to } => match &base.kind(Interner) {
TyKind::Array(inner, c) => {
let next_c = usize_const(
db,
@@ -218,6 +234,7 @@ impl Place {
self.local == child.local && child.projection.starts_with(&self.projection)
}
+ /// The place itself is not included
fn iterate_over_parents(&self) -> impl Iterator<Item = Place> + '_ {
(0..self.projection.len())
.map(|x| &self.projection[0..x])
@@ -321,8 +338,8 @@ impl SwitchTargets {
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Terminator {
- span: MirSpan,
- kind: TerminatorKind,
+ pub span: MirSpan,
+ pub kind: TerminatorKind,
}
#[derive(Debug, PartialEq, Eq, Clone)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
index a5dd0182e..ad98e8fa1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -52,7 +52,7 @@ fn all_mir_bodies(
let closures = body.closures.clone();
Box::new(
iter::once(Ok(body))
- .chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
+ .chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
)
}
Err(e) => Box::new(iter::once(Err(e))),
@@ -62,7 +62,7 @@ fn all_mir_bodies(
Ok(body) => {
let closures = body.closures.clone();
Box::new(
- iter::once(Ok(body)).chain(closures.into_iter().flat_map(|x| for_closure(db, x))),
+ iter::once(Ok(body)).chain(closures.into_iter().flat_map(|it| for_closure(db, it))),
)
}
Err(e) => Box::new(iter::once(Err(e))),
@@ -171,7 +171,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
}
TerminatorKind::Call { func, args, .. } => {
for_operand(func, terminator.span);
- args.iter().for_each(|x| for_operand(x, terminator.span));
+ args.iter().for_each(|it| for_operand(it, terminator.span));
}
TerminatorKind::Assert { cond, .. } => {
for_operand(cond, terminator.span);
@@ -245,7 +245,7 @@ fn ever_initialized_map(
body: &MirBody,
) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
- body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
+ body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect();
fn dfs(
db: &dyn HirDatabase,
body: &MirBody,
@@ -271,7 +271,10 @@ fn ever_initialized_map(
}
}
let Some(terminator) = &block.terminator else {
- never!("Terminator should be none only in construction.\nThe body:\n{}", body.pretty_print(db));
+ never!(
+ "Terminator should be none only in construction.\nThe body:\n{}",
+ body.pretty_print(db)
+ );
return;
};
let targets = match &terminator.kind {
@@ -311,7 +314,7 @@ fn ever_initialized_map(
result[body.start_block].insert(l, true);
dfs(db, body, body.start_block, l, &mut result);
}
- for l in body.locals.iter().map(|x| x.0) {
+ for l in body.locals.iter().map(|it| it.0) {
if !result[body.start_block].contains_idx(l) {
result[body.start_block].insert(l, false);
dfs(db, body, body.start_block, l, &mut result);
@@ -325,10 +328,10 @@ fn mutability_of_locals(
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let mut result: ArenaMap<LocalId, MutabilityReason> =
- body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect();
+ body.locals.iter().map(|it| (it.0, MutabilityReason::Not)).collect();
let mut push_mut_span = |local, span| match &mut result[local] {
MutabilityReason::Mut { spans } => spans.push(span),
- x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] },
+ it @ MutabilityReason::Not => *it = MutabilityReason::Mut { spans: vec![span] },
};
let ever_init_maps = ever_initialized_map(db, body);
for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
index 9acf9d39e..9e30eed56 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -1,6 +1,13 @@
//! This module provides a MIR interpreter, which is used in const eval.
-use std::{borrow::Cow, collections::HashMap, fmt::Write, iter, ops::Range};
+use std::{
+ borrow::Cow,
+ cell::RefCell,
+ collections::{HashMap, HashSet},
+ fmt::Write,
+ iter, mem,
+ ops::Range,
+};
use base_db::{CrateId, FileId};
use chalk_ir::Mutability;
@@ -8,12 +15,13 @@ use either::Either;
use hir_def::{
builtin_type::BuiltinType,
data::adt::{StructFlags, VariantData},
- lang_item::{lang_attr, LangItem},
+ lang_item::LangItem,
layout::{TagEncoding, Variants},
- AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId,
- VariantId,
+ resolver::{HasResolver, TypeNs, ValueNs},
+ AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
+ StaticId, VariantId,
};
-use hir_expand::InFile;
+use hir_expand::{mod_path::ModPath, InFile};
use intern::Interned;
use la_arena::ArenaMap;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -28,7 +36,7 @@ use crate::{
infer::PointerCast,
layout::{Layout, LayoutError, RustcEnumVariantIdx},
mapping::from_chalk,
- method_resolution::{is_dyn_method, lookup_impl_method},
+ method_resolution::{is_dyn_method, lookup_impl_const},
name, static_lifetime,
traits::FnTrait,
utils::{detect_variant_from_bytes, ClosureSubst},
@@ -37,8 +45,9 @@ use crate::{
};
use super::{
- return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan, Operand,
- Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp,
+ return_slot, AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError,
+ MirSpan, Operand, Place, PlaceElem, ProjectionElem, Rvalue, StatementKind, TerminatorKind,
+ UnOp,
};
mod shim;
@@ -48,15 +57,15 @@ mod tests;
macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
- Ok(x) => x,
+ Ok(it) => it,
Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))),
}))
};
}
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirEvalError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirEvalError::NotSupported(format!($it)))
};
}
@@ -67,18 +76,22 @@ pub struct VTableMap {
}
impl VTableMap {
+ const OFFSET: usize = 1000; // We should add some offset to ids to make 0 (null) an invalid id.
+
fn id(&mut self, ty: Ty) -> usize {
- if let Some(x) = self.ty_to_id.get(&ty) {
- return *x;
+ if let Some(it) = self.ty_to_id.get(&ty) {
+ return *it;
}
- let id = self.id_to_ty.len();
+ let id = self.id_to_ty.len() + VTableMap::OFFSET;
self.id_to_ty.push(ty.clone());
self.ty_to_id.insert(ty, id);
id
}
pub(crate) fn ty(&self, id: usize) -> Result<&Ty> {
- self.id_to_ty.get(id).ok_or(MirEvalError::InvalidVTableId(id))
+ id.checked_sub(VTableMap::OFFSET)
+ .and_then(|id| self.id_to_ty.get(id))
+ .ok_or(MirEvalError::InvalidVTableId(id))
}
fn ty_of_bytes(&self, bytes: &[u8]) -> Result<&Ty> {
@@ -114,11 +127,25 @@ impl TlsData {
}
}
+struct StackFrame {
+ locals: Locals,
+ destination: Option<BasicBlockId>,
+ prev_stack_ptr: usize,
+ span: (MirSpan, DefWithBodyId),
+}
+
+#[derive(Clone)]
+enum MirOrDynIndex {
+ Mir(Arc<MirBody>),
+ Dyn(usize),
+}
+
pub struct Evaluator<'a> {
db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
stack: Vec<u8>,
heap: Vec<u8>,
+ code_stack: Vec<StackFrame>,
/// Stores the global location of the statics. We const evaluate every static first time we need it
/// and see it's missing, then we add it to this to reuse.
static_locations: FxHashMap<StaticId, Address>,
@@ -127,8 +154,21 @@ pub struct Evaluator<'a> {
/// time of use.
vtable_map: VTableMap,
thread_local_storage: TlsData,
+ random_state: oorandom::Rand64,
stdout: Vec<u8>,
stderr: Vec<u8>,
+ layout_cache: RefCell<FxHashMap<Ty, Arc<Layout>>>,
+ projected_ty_cache: RefCell<FxHashMap<(Ty, PlaceElem), Ty>>,
+ not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
+ mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
+ /// Constantly dropping and creating `Locals` is very costly. We store
+ /// old locals that we normaly want to drop here, to reuse their allocations
+ /// later.
+ unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
+ cached_ptr_size: usize,
+ cached_fn_trait_func: Option<FunctionId>,
+ cached_fn_mut_trait_func: Option<FunctionId>,
+ cached_fn_once_trait_func: Option<FunctionId>,
crate_id: CrateId,
// FIXME: This is a workaround, see the comment on `interpret_mir`
assert_placeholder_ty_is_unused: bool,
@@ -136,6 +176,8 @@ pub struct Evaluator<'a> {
execution_limit: usize,
/// An additional limit on stack depth, to prevent stack overflow
stack_depth_limit: usize,
+ /// Maximum number of bytes that the heap and the stack can grow to
+ memory_limit: usize,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -192,7 +234,7 @@ impl IntervalAndTy {
addr: Address,
ty: Ty,
evaluator: &Evaluator<'_>,
- locals: &Locals<'_>,
+ locals: &Locals,
) -> Result<IntervalAndTy> {
let size = evaluator.size_of_sized(&ty, locals, "type of interval")?;
Ok(IntervalAndTy { interval: Interval { addr, size }, ty })
@@ -226,18 +268,28 @@ impl IntervalOrOwned {
}
}
+#[cfg(target_pointer_width = "64")]
+const STACK_OFFSET: usize = 1 << 60;
+#[cfg(target_pointer_width = "64")]
+const HEAP_OFFSET: usize = 1 << 59;
+
+#[cfg(target_pointer_width = "32")]
+const STACK_OFFSET: usize = 1 << 30;
+#[cfg(target_pointer_width = "32")]
+const HEAP_OFFSET: usize = 1 << 29;
+
impl Address {
- fn from_bytes(x: &[u8]) -> Result<Self> {
- Ok(Address::from_usize(from_bytes!(usize, x)))
+ fn from_bytes(it: &[u8]) -> Result<Self> {
+ Ok(Address::from_usize(from_bytes!(usize, it)))
}
- fn from_usize(x: usize) -> Self {
- if x > usize::MAX / 2 {
- Stack(x - usize::MAX / 2)
- } else if x > usize::MAX / 4 {
- Heap(x - usize::MAX / 4)
+ fn from_usize(it: usize) -> Self {
+ if it > STACK_OFFSET {
+ Stack(it - STACK_OFFSET)
+ } else if it > HEAP_OFFSET {
+ Heap(it - HEAP_OFFSET)
} else {
- Invalid(x)
+ Invalid(it)
}
}
@@ -247,23 +299,23 @@ impl Address {
fn to_usize(&self) -> usize {
let as_num = match self {
- Stack(x) => *x + usize::MAX / 2,
- Heap(x) => *x + usize::MAX / 4,
- Invalid(x) => *x,
+ Stack(it) => *it + STACK_OFFSET,
+ Heap(it) => *it + HEAP_OFFSET,
+ Invalid(it) => *it,
};
as_num
}
fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
match self {
- Stack(x) => Stack(f(*x)),
- Heap(x) => Heap(f(*x)),
- Invalid(x) => Invalid(f(*x)),
+ Stack(it) => Stack(f(*it)),
+ Heap(it) => Heap(f(*it)),
+ Invalid(it) => Invalid(f(*it)),
}
}
fn offset(&self, offset: usize) -> Address {
- self.map(|x| x + offset)
+ self.map(|it| it + offset)
}
}
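
For reference, a self-contained sketch of the tagged-address encoding above (the 64-bit constants are assumed here, and the names are simplified): stack and heap offsets live in disjoint high ranges of the usize space, so a raw pointer value read back out of guest memory classifies itself without extra bookkeeping.

const STACK_OFFSET: usize = 1 << 60;
const HEAP_OFFSET: usize = 1 << 59;

#[derive(Debug, PartialEq)]
enum Addr {
    Stack(usize),
    Heap(usize),
    Invalid(usize),
}

fn decode(raw: usize) -> Addr {
    if raw > STACK_OFFSET {
        Addr::Stack(raw - STACK_OFFSET)
    } else if raw > HEAP_OFFSET {
        Addr::Heap(raw - HEAP_OFFSET)
    } else {
        Addr::Invalid(raw)
    }
}

fn main() {
    assert_eq!(decode(STACK_OFFSET + 8), Addr::Stack(8));
    assert_eq!(decode(HEAP_OFFSET + 8), Addr::Heap(8));
    assert_eq!(decode(0), Addr::Invalid(0)); // null never maps into stack or heap
}
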
@@ -282,13 +334,14 @@ pub enum MirEvalError {
TypeIsUnsized(Ty, &'static str),
NotSupported(String),
InvalidConst(Const),
- InFunction(Either<FunctionId, ClosureId>, Box<MirEvalError>, MirSpan, DefWithBodyId),
+ InFunction(Box<MirEvalError>, Vec<(Either<FunctionId, ClosureId>, MirSpan, DefWithBodyId)>),
ExecutionLimitExceeded,
StackOverflow,
TargetDataLayoutNotAvailable,
InvalidVTableId(usize),
CoerceUnsizedError(Ty),
LangItemNotFound(LangItem),
+ BrokenLayout(Layout),
}
impl MirEvalError {
@@ -300,40 +353,42 @@ impl MirEvalError {
) -> std::result::Result<(), std::fmt::Error> {
writeln!(f, "Mir eval error:")?;
let mut err = self;
- while let MirEvalError::InFunction(func, e, span, def) = err {
+ while let MirEvalError::InFunction(e, stack) = err {
err = e;
- match func {
- Either::Left(func) => {
- let function_name = db.function_data(*func);
- writeln!(
- f,
- "In function {} ({:?})",
- function_name.name.display(db.upcast()),
- func
- )?;
- }
- Either::Right(clos) => {
- writeln!(f, "In {:?}", clos)?;
+ for (func, span, def) in stack.iter().take(30).rev() {
+ match func {
+ Either::Left(func) => {
+ let function_name = db.function_data(*func);
+ writeln!(
+ f,
+ "In function {} ({:?})",
+ function_name.name.display(db.upcast()),
+ func
+ )?;
+ }
+ Either::Right(clos) => {
+ writeln!(f, "In {:?}", clos)?;
+ }
}
+ let source_map = db.body_with_source_map(*def).1;
+ let span: InFile<SyntaxNodePtr> = match span {
+ MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
+ Ok(s) => s.map(|it| it.into()),
+ Err(_) => continue,
+ },
+ MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
+ Ok(s) => s.map(|it| match it {
+ Either::Left(e) => e.into(),
+ Either::Right(e) => e.into(),
+ }),
+ Err(_) => continue,
+ },
+ MirSpan::Unknown => continue,
+ };
+ let file_id = span.file_id.original_file(db.upcast());
+ let text_range = span.value.text_range();
+ writeln!(f, "{}", span_formatter(file_id, text_range))?;
}
- let source_map = db.body_with_source_map(*def).1;
- let span: InFile<SyntaxNodePtr> = match span {
- MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
- Ok(s) => s.map(|x| x.into()),
- Err(_) => continue,
- },
- MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|x| match x {
- Either::Left(e) => e.into(),
- Either::Right(e) => e.into(),
- }),
- Err(_) => continue,
- },
- MirSpan::Unknown => continue,
- };
- let file_id = span.file_id.original_file(db.upcast());
- let text_range = span.value.text_range();
- writeln!(f, "{}", span_formatter(file_id, text_range))?;
}
match err {
MirEvalError::InFunction(..) => unreachable!(),
@@ -373,6 +428,7 @@ impl MirEvalError {
| MirEvalError::TargetDataLayoutNotAvailable
| MirEvalError::CoerceUnsizedError(_)
| MirEvalError::LangItemNotFound(_)
+ | MirEvalError::BrokenLayout(_)
| MirEvalError::InvalidVTableId(_) => writeln!(f, "{:?}", err)?,
}
Ok(())
@@ -407,19 +463,14 @@ impl std::fmt::Debug for MirEvalError {
Self::CoerceUnsizedError(arg0) => {
f.debug_tuple("CoerceUnsizedError").field(arg0).finish()
}
+ Self::BrokenLayout(arg0) => f.debug_tuple("BrokenLayout").field(arg0).finish(),
Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),
Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
Self::InvalidConst(arg0) => {
let data = &arg0.data(Interner);
f.debug_struct("InvalidConst").field("ty", &data.ty).field("value", &arg0).finish()
}
- Self::InFunction(func, e, span, _) => {
- let mut e = &**e;
- let mut stack = vec![(*func, *span)];
- while let Self::InFunction(f, next_e, span, _) = e {
- e = &next_e;
- stack.push((*f, *span));
- }
+ Self::InFunction(e, stack) => {
f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish()
}
}
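
Both the formatter and the Debug impl above are simplified by the same change: instead of nesting one `InFunction` wrapper per frame, the error now carries the whole call stack in a single `Vec`. A rough sketch of that pattern with toy types (the names here are illustrative, not the evaluator's API):

enum EvalError {
    NotSupported(String),
    // One wrapper holds the root cause plus every frame, innermost first.
    InFunction(Box<EvalError>, Vec<&'static str>),
}

fn push_frame(err: EvalError, frame: &'static str) -> EvalError {
    match err {
        // An existing wrapper just grows its stack instead of nesting again.
        EvalError::InFunction(inner, mut stack) => {
            stack.push(frame);
            EvalError::InFunction(inner, stack)
        }
        other => EvalError::InFunction(Box::new(other), vec![frame]),
    }
}

fn main() {
    let root = EvalError::NotSupported("inline asm".to_owned());
    let err = push_frame(push_frame(root, "callee"), "caller");
    if let EvalError::InFunction(_, stack) = err {
        assert_eq!(stack, vec!["callee", "caller"]);
    }
}
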
@@ -435,85 +486,126 @@ struct DropFlags {
impl DropFlags {
fn add_place(&mut self, p: Place) {
- if p.iterate_over_parents().any(|x| self.need_drop.contains(&x)) {
+ if p.iterate_over_parents().any(|it| self.need_drop.contains(&it)) {
return;
}
- self.need_drop.retain(|x| !p.is_parent(x));
+ self.need_drop.retain(|it| !p.is_parent(it));
self.need_drop.insert(p);
}
fn remove_place(&mut self, p: &Place) -> bool {
// FIXME: replace parents with parts
+ if let Some(parent) = p.iterate_over_parents().find(|it| self.need_drop.contains(&it)) {
+ self.need_drop.remove(&parent);
+ return true;
+ }
self.need_drop.remove(p)
}
+
+ fn clear(&mut self) {
+ self.need_drop.clear();
+ }
}
#[derive(Debug)]
-struct Locals<'a> {
- ptr: &'a ArenaMap<LocalId, Interval>,
- body: &'a MirBody,
+struct Locals {
+ ptr: ArenaMap<LocalId, Interval>,
+ body: Arc<MirBody>,
drop_flags: DropFlags,
}
pub fn interpret_mir(
db: &dyn HirDatabase,
- body: &MirBody,
+ body: Arc<MirBody>,
// FIXME: This is a workaround. Ideally, const generics should have a separate body (issue #7434), but now
// they share their body with their parent, so in MIR lowering we have locals of the parent body, which
// might have placeholders. With this argument, we (wrongly) assume that every placeholder type has
// a zero size, hoping that they are all outside of our current body. Even without a fix for #7434, we can
// (and probably should) do better here, for example by excluding bindings outside of the target expression.
assert_placeholder_ty_is_unused: bool,
+ trait_env: Option<Arc<TraitEnvironment>>,
) -> (Result<Const>, String, String) {
let ty = body.locals[return_slot()].ty.clone();
- let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused);
- let x: Result<Const> = (|| {
- let bytes = evaluator.interpret_mir(&body, None.into_iter())?;
+ let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env);
+ let it: Result<Const> = (|| {
+ if evaluator.ptr_size() != std::mem::size_of::<usize>() {
+ not_supported!("targets with different pointer size from host");
+ }
+ let bytes = evaluator.interpret_mir(body.clone(), None.into_iter())?;
let mut memory_map = evaluator.create_memory_map(
&bytes,
&ty,
- &Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() },
+ &Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() },
)?;
memory_map.vtable = evaluator.vtable_map.clone();
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
})();
(
- x,
+ it,
String::from_utf8_lossy(&evaluator.stdout).into_owned(),
String::from_utf8_lossy(&evaluator.stderr).into_owned(),
)
}
+#[cfg(test)]
+const EXECUTION_LIMIT: usize = 100_000;
+#[cfg(not(test))]
+const EXECUTION_LIMIT: usize = 10_000_000;
+
impl Evaluator<'_> {
pub fn new<'a>(
db: &'a dyn HirDatabase,
- body: &MirBody,
+ owner: DefWithBodyId,
assert_placeholder_ty_is_unused: bool,
+ trait_env: Option<Arc<TraitEnvironment>>,
) -> Evaluator<'a> {
- let crate_id = body.owner.module(db.upcast()).krate();
- let trait_env = db.trait_environment_for_body(body.owner);
+ let crate_id = owner.module(db.upcast()).krate();
Evaluator {
stack: vec![0],
heap: vec![0],
+ code_stack: vec![],
vtable_map: VTableMap::default(),
thread_local_storage: TlsData::default(),
static_locations: HashMap::default(),
db,
- trait_env,
+ random_state: oorandom::Rand64::new(0),
+ trait_env: trait_env.unwrap_or_else(|| db.trait_environment_for_body(owner)),
crate_id,
stdout: vec![],
stderr: vec![],
assert_placeholder_ty_is_unused,
stack_depth_limit: 100,
- execution_limit: 1000_000,
+ execution_limit: EXECUTION_LIMIT,
+ memory_limit: 1_000_000_000, // 1GB for the stack and 1GB for the heap (2GB in total)
+ layout_cache: RefCell::new(HashMap::default()),
+ projected_ty_cache: RefCell::new(HashMap::default()),
+ not_special_fn_cache: RefCell::new(HashSet::default()),
+ mir_or_dyn_index_cache: RefCell::new(HashMap::default()),
+ unused_locals_store: RefCell::new(HashMap::default()),
+ cached_ptr_size: match db.target_data_layout(crate_id) {
+ Some(it) => it.pointer_size.bytes_usize(),
+ None => 8,
+ },
+ cached_fn_trait_func: db
+ .lang_item(crate_id, LangItem::Fn)
+ .and_then(|x| x.as_trait())
+ .and_then(|x| db.trait_data(x).method_by_name(&name![call])),
+ cached_fn_mut_trait_func: db
+ .lang_item(crate_id, LangItem::FnMut)
+ .and_then(|x| x.as_trait())
+ .and_then(|x| db.trait_data(x).method_by_name(&name![call_mut])),
+ cached_fn_once_trait_func: db
+ .lang_item(crate_id, LangItem::FnOnce)
+ .and_then(|x| x.as_trait())
+ .and_then(|x| db.trait_data(x).method_by_name(&name![call_once])),
}
}
- fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result<Address> {
+ fn place_addr(&self, p: &Place, locals: &Locals) -> Result<Address> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
}
- fn place_interval(&self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
+ fn place_interval(&self, p: &Place, locals: &Locals) -> Result<Interval> {
let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;
Ok(Interval {
addr: place_addr_and_ty.0,
@@ -526,39 +618,47 @@ impl Evaluator<'_> {
}
fn ptr_size(&self) -> usize {
- match self.db.target_data_layout(self.crate_id) {
- Some(x) => x.pointer_size.bytes_usize(),
- None => 8,
+ self.cached_ptr_size
+ }
+
+ fn projected_ty(&self, ty: Ty, proj: PlaceElem) -> Ty {
+ let pair = (ty, proj);
+ if let Some(r) = self.projected_ty_cache.borrow().get(&pair) {
+ return r.clone();
}
+ let (ty, proj) = pair;
+ let r = proj.projected_ty(
+ ty.clone(),
+ self.db,
+ |c, subst, f| {
+ let (def, _) = self.db.lookup_intern_closure(c.into());
+ let infer = self.db.infer(def);
+ let (captures, _) = infer.closure_info(&c);
+ let parent_subst = ClosureSubst(subst).parent_subst();
+ captures
+ .get(f)
+ .expect("broken closure field")
+ .ty
+ .clone()
+ .substitute(Interner, parent_subst)
+ },
+ self.crate_id,
+ );
+ self.projected_ty_cache.borrow_mut().insert((ty, proj), r.clone());
+ r
}
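
`projected_ty` above is the first of several `RefCell`-backed memoization caches added in this change (`layout_cache`, `not_special_fn_cache` and `mir_or_dyn_index_cache` follow the same shape). A minimal sketch of the pattern, with a made-up `Projector` type standing in for the evaluator:

use std::cell::RefCell;
use std::collections::HashMap;

struct Projector {
    cache: RefCell<HashMap<(String, usize), String>>,
}

impl Projector {
    fn projected(&self, ty: String, field: usize) -> String {
        // The shared borrow is dropped before the computation and the insert run.
        if let Some(hit) = self.cache.borrow().get(&(ty.clone(), field)) {
            return hit.clone();
        }
        let result = format!("{ty}.{field}"); // stand-in for the real projection work
        self.cache.borrow_mut().insert((ty, field), result.clone());
        result
    }
}

fn main() {
    let p = Projector { cache: RefCell::new(HashMap::new()) };
    assert_eq!(p.projected("Foo".into(), 0), "Foo.0");
    assert_eq!(p.projected("Foo".into(), 0), "Foo.0"); // second call hits the cache
}
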
fn place_addr_and_ty_and_metadata<'a>(
&'a self,
p: &Place,
- locals: &'a Locals<'a>,
+ locals: &'a Locals,
) -> Result<(Address, Ty, Option<IntervalOrOwned>)> {
let mut addr = locals.ptr[p.local].addr;
let mut ty: Ty = locals.body.locals[p.local].ty.clone();
let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized
for proj in &*p.projection {
let prev_ty = ty.clone();
- ty = proj.projected_ty(
- ty,
- self.db,
- |c, subst, f| {
- let (def, _) = self.db.lookup_intern_closure(c.into());
- let infer = self.db.infer(def);
- let (captures, _) = infer.closure_info(&c);
- let parent_subst = ClosureSubst(subst).parent_subst();
- captures
- .get(f)
- .expect("broken closure field")
- .ty
- .clone()
- .substitute(Interner, parent_subst)
- },
- self.crate_id,
- );
+ ty = self.projected_ty(ty, proj.clone());
match proj {
ProjectionElem::Deref => {
metadata = if self.size_align_of(&ty, locals)?.is_none() {
@@ -569,8 +669,8 @@ impl Evaluator<'_> {
} else {
None
};
- let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
- addr = Address::from_usize(x);
+ let it = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
+ addr = Address::from_usize(it);
}
ProjectionElem::Index(op) => {
let offset = from_bytes!(
@@ -586,13 +686,13 @@ impl Evaluator<'_> {
let offset = if from_end {
let len = match prev_ty.kind(Interner) {
TyKind::Array(_, c) => match try_const_usize(self.db, c) {
- Some(x) => x as u64,
+ Some(it) => it as u64,
None => {
not_supported!("indexing array with unknown const from end")
}
},
TyKind::Slice(_) => match metadata {
- Some(x) => from_bytes!(u64, x.get(self)?),
+ Some(it) => from_bytes!(u64, it.get(self)?),
None => not_supported!("slice place without metadata"),
},
_ => not_supported!("bad type for const index"),
@@ -607,13 +707,13 @@ impl Evaluator<'_> {
addr = addr.offset(ty_size * offset);
}
&ProjectionElem::Subslice { from, to } => {
- let inner_ty = match &ty.data(Interner).kind {
+ let inner_ty = match &ty.kind(Interner) {
TyKind::Array(inner, _) | TyKind::Slice(inner) => inner.clone(),
_ => TyKind::Error.intern(Interner),
};
metadata = match metadata {
- Some(x) => {
- let prev_len = from_bytes!(u64, x.get(self)?);
+ Some(it) => {
+ let prev_len = from_bytes!(u64, it.get(self)?);
Some(IntervalOrOwned::Owned(
(prev_len - from - to).to_le_bytes().to_vec(),
))
@@ -636,8 +736,8 @@ impl Evaluator<'_> {
Variants::Single { .. } => &layout,
Variants::Multiple { variants, .. } => {
&variants[match f.parent {
- hir_def::VariantId::EnumVariantId(x) => {
- RustcEnumVariantIdx(x.local_id)
+ hir_def::VariantId::EnumVariantId(it) => {
+ RustcEnumVariantIdx(it.local_id)
}
_ => {
return Err(MirEvalError::TypeError(
@@ -652,8 +752,10 @@ impl Evaluator<'_> {
.offset(u32::from(f.local_id.into_raw()) as usize)
.bytes_usize();
addr = addr.offset(offset);
- // FIXME: support structs with unsized fields
- metadata = None;
+ // Unsized field metadata is equal to the metadata of the struct
+ if self.size_align_of(&ty, locals)?.is_some() {
+ metadata = None;
+ }
}
ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
}
@@ -662,22 +764,26 @@ impl Evaluator<'_> {
}
fn layout(&self, ty: &Ty) -> Result<Arc<Layout>> {
- self.db
- .layout_of_ty(ty.clone(), self.crate_id)
- .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))
+ if let Some(x) = self.layout_cache.borrow().get(ty) {
+ return Ok(x.clone());
+ }
+ let r = self
+ .db
+ .layout_of_ty(ty.clone(), self.trait_env.clone())
+ .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))?;
+ self.layout_cache.borrow_mut().insert(ty.clone(), r.clone());
+ Ok(r)
}
fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> {
- self.db.layout_of_adt(adt, subst.clone(), self.crate_id).map_err(|e| {
- MirEvalError::LayoutError(e, TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
- })
+ self.layout(&TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
}
- fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<Ty> {
+ fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<Ty> {
Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)
}
- fn operand_ty(&self, o: &Operand, locals: &Locals<'_>) -> Result<Ty> {
+ fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<Ty> {
Ok(match o {
Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?,
Operand::Constant(c) => c.data(Interner).ty.clone(),
@@ -688,11 +794,7 @@ impl Evaluator<'_> {
})
}
- fn operand_ty_and_eval(
- &mut self,
- o: &Operand,
- locals: &mut Locals<'_>,
- ) -> Result<IntervalAndTy> {
+ fn operand_ty_and_eval(&mut self, o: &Operand, locals: &mut Locals) -> Result<IntervalAndTy> {
Ok(IntervalAndTy {
interval: self.eval_operand(o, locals)?,
ty: self.operand_ty(o, locals)?,
@@ -701,39 +803,178 @@ impl Evaluator<'_> {
fn interpret_mir(
&mut self,
- body: &MirBody,
- args: impl Iterator<Item = Vec<u8>>,
+ body: Arc<MirBody>,
+ args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<Vec<u8>> {
- if let Some(x) = self.stack_depth_limit.checked_sub(1) {
- self.stack_depth_limit = x;
+ if let Some(it) = self.stack_depth_limit.checked_sub(1) {
+ self.stack_depth_limit = it;
} else {
return Err(MirEvalError::StackOverflow);
}
let mut current_block_idx = body.start_block;
- let mut locals =
- Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() };
- let (locals_ptr, stack_size) = {
- let mut stack_ptr = self.stack.len();
- let addr = body
- .locals
- .iter()
- .map(|(id, x)| {
- let size =
- self.size_of_sized(&x.ty, &locals, "no unsized local in extending stack")?;
- let my_ptr = stack_ptr;
- stack_ptr += size;
- Ok((id, Interval { addr: Stack(my_ptr), size }))
- })
- .collect::<Result<ArenaMap<LocalId, _>>>()?;
- let stack_size = stack_ptr - self.stack.len();
- (addr, stack_size)
- };
- locals.ptr = &locals_ptr;
- self.stack.extend(iter::repeat(0).take(stack_size));
+ let (mut locals, prev_stack_ptr) = self.create_locals_for_body(&body, None)?;
+ self.fill_locals_for_body(&body, &mut locals, args)?;
+ let prev_code_stack = mem::take(&mut self.code_stack);
+ let span = (MirSpan::Unknown, body.owner);
+ self.code_stack.push(StackFrame { locals, destination: None, prev_stack_ptr, span });
+ 'stack: loop {
+ let Some(mut my_stack_frame) = self.code_stack.pop() else {
+ not_supported!("missing stack frame");
+ };
+ let e = (|| {
+ let mut locals = &mut my_stack_frame.locals;
+ let body = locals.body.clone();
+ loop {
+ let current_block = &body.basic_blocks[current_block_idx];
+ if let Some(it) = self.execution_limit.checked_sub(1) {
+ self.execution_limit = it;
+ } else {
+ return Err(MirEvalError::ExecutionLimitExceeded);
+ }
+ for statement in &current_block.statements {
+ match &statement.kind {
+ StatementKind::Assign(l, r) => {
+ let addr = self.place_addr(l, &locals)?;
+ let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?;
+ self.write_memory(addr, &result)?;
+ locals.drop_flags.add_place(l.clone());
+ }
+ StatementKind::Deinit(_) => not_supported!("de-init statement"),
+ StatementKind::StorageLive(_)
+ | StatementKind::StorageDead(_)
+ | StatementKind::Nop => (),
+ }
+ }
+ let Some(terminator) = current_block.terminator.as_ref() else {
+ not_supported!("block without terminator");
+ };
+ match &terminator.kind {
+ TerminatorKind::Goto { target } => {
+ current_block_idx = *target;
+ }
+ TerminatorKind::Call {
+ func,
+ args,
+ destination,
+ target,
+ cleanup: _,
+ from_hir_call: _,
+ } => {
+ let destination_interval = self.place_interval(destination, &locals)?;
+ let fn_ty = self.operand_ty(func, &locals)?;
+ let args = args
+ .iter()
+ .map(|it| self.operand_ty_and_eval(it, &mut locals))
+ .collect::<Result<Vec<_>>>()?;
+ let stack_frame = match &fn_ty.kind(Interner) {
+ TyKind::Function(_) => {
+ let bytes = self.eval_operand(func, &mut locals)?;
+ self.exec_fn_pointer(
+ bytes,
+ destination_interval,
+ &args,
+ &locals,
+ *target,
+ terminator.span,
+ )?
+ }
+ TyKind::FnDef(def, generic_args) => self.exec_fn_def(
+ *def,
+ generic_args,
+ destination_interval,
+ &args,
+ &locals,
+ *target,
+ terminator.span,
+ )?,
+ it => not_supported!("unknown function type {it:?}"),
+ };
+ locals.drop_flags.add_place(destination.clone());
+ if let Some(stack_frame) = stack_frame {
+ self.code_stack.push(my_stack_frame);
+ current_block_idx = stack_frame.locals.body.start_block;
+ self.code_stack.push(stack_frame);
+ return Ok(None);
+ } else {
+ current_block_idx =
+ target.ok_or(MirEvalError::UndefinedBehavior(
+ "Diverging function returned".to_owned(),
+ ))?;
+ }
+ }
+ TerminatorKind::SwitchInt { discr, targets } => {
+ let val = u128::from_le_bytes(pad16(
+ self.eval_operand(discr, &mut locals)?.get(&self)?,
+ false,
+ ));
+ current_block_idx = targets.target_for_value(val);
+ }
+ TerminatorKind::Return => {
+ break;
+ }
+ TerminatorKind::Unreachable => {
+ return Err(MirEvalError::UndefinedBehavior(
+ "unreachable executed".to_owned(),
+ ));
+ }
+ TerminatorKind::Drop { place, target, unwind: _ } => {
+ self.drop_place(place, &mut locals, terminator.span)?;
+ current_block_idx = *target;
+ }
+ _ => not_supported!("unknown terminator"),
+ }
+ }
+ Ok(Some(my_stack_frame))
+ })();
+ let my_stack_frame = match e {
+ Ok(None) => continue 'stack,
+ Ok(Some(x)) => x,
+ Err(e) => {
+ let my_code_stack = mem::replace(&mut self.code_stack, prev_code_stack);
+ let mut error_stack = vec![];
+ for frame in my_code_stack.into_iter().rev() {
+ if let DefWithBodyId::FunctionId(f) = frame.locals.body.owner {
+ error_stack.push((Either::Left(f), frame.span.0, frame.span.1));
+ }
+ }
+ return Err(MirEvalError::InFunction(Box::new(e), error_stack));
+ }
+ };
+ let return_interval = my_stack_frame.locals.ptr[return_slot()];
+ self.unused_locals_store
+ .borrow_mut()
+ .entry(my_stack_frame.locals.body.owner)
+ .or_default()
+ .push(my_stack_frame.locals);
+ match my_stack_frame.destination {
+ None => {
+ self.code_stack = prev_code_stack;
+ self.stack_depth_limit += 1;
+ return Ok(return_interval.get(self)?.to_vec());
+ }
+ Some(bb) => {
+ // We don't support const promotion, so we can't truncate the stack yet.
+ let _ = my_stack_frame.prev_stack_ptr;
+ // self.stack.truncate(my_stack_frame.prev_stack_ptr);
+ current_block_idx = bb;
+ }
+ }
+ }
+ }
+
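The rewritten `interpret_mir` above replaces per-call recursion with an explicit `code_stack` of `StackFrame`s: a `Call` terminator pushes the suspended caller and then the callee, and a `Return` pops back into the caller at its recorded destination block. A toy sketch of that control-flow shape (an illustrative instruction set, not MIR):

enum Inst {
    Nop,
    Call { callee: usize },
    Return,
}

struct Frame {
    func: usize,
    pc: usize,
}

fn run(funcs: &[Vec<Inst>]) {
    let mut stack = vec![Frame { func: 0, pc: 0 }];
    'stack: while let Some(mut frame) = stack.pop() {
        loop {
            match &funcs[frame.func][frame.pc] {
                Inst::Nop => frame.pc += 1,
                Inst::Call { callee } => {
                    frame.pc += 1;                          // where the caller resumes
                    let callee = *callee;
                    stack.push(frame);                      // suspend the caller
                    stack.push(Frame { func: callee, pc: 0 });
                    continue 'stack;                        // run the callee next
                }
                Inst::Return => {
                    println!("returning from fn {}", frame.func);
                    continue 'stack;                        // the caller is now on top
                }
            }
        }
    }
}

fn main() {
    // fn 0 calls fn 1 twice; fn 1 does some work and returns.
    run(&[
        vec![Inst::Call { callee: 1 }, Inst::Call { callee: 1 }, Inst::Return],
        vec![Inst::Nop, Inst::Return],
    ]);
}
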
+ fn fill_locals_for_body(
+ &mut self,
+ body: &MirBody,
+ locals: &mut Locals,
+ args: impl Iterator<Item = IntervalOrOwned>,
+ ) -> Result<()> {
let mut remain_args = body.param_locals.len();
- for ((l, interval), value) in locals_ptr.iter().skip(1).zip(args) {
+ for ((l, interval), value) in locals.ptr.iter().skip(1).zip(args) {
locals.drop_flags.add_place(l.into());
- interval.write_from_bytes(self, &value)?;
+ match value {
+ IntervalOrOwned::Owned(value) => interval.write_from_bytes(self, &value)?,
+ IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,
+ }
if remain_args == 0 {
return Err(MirEvalError::TypeError("more arguments provided"));
}
@@ -742,101 +983,64 @@ impl Evaluator<'_> {
if remain_args > 0 {
return Err(MirEvalError::TypeError("not enough arguments provided"));
}
- loop {
- let current_block = &body.basic_blocks[current_block_idx];
- if let Some(x) = self.execution_limit.checked_sub(1) {
- self.execution_limit = x;
- } else {
- return Err(MirEvalError::ExecutionLimitExceeded);
- }
- for statement in &current_block.statements {
- match &statement.kind {
- StatementKind::Assign(l, r) => {
- let addr = self.place_addr(l, &locals)?;
- let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?;
- self.write_memory(addr, &result)?;
- locals.drop_flags.add_place(l.clone());
- }
- StatementKind::Deinit(_) => not_supported!("de-init statement"),
- StatementKind::StorageLive(_)
- | StatementKind::StorageDead(_)
- | StatementKind::Nop => (),
+ Ok(())
+ }
+
+ fn create_locals_for_body(
+ &mut self,
+ body: &Arc<MirBody>,
+ destination: Option<Interval>,
+ ) -> Result<(Locals, usize)> {
+ let mut locals =
+ match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() {
+ None => Locals {
+ ptr: ArenaMap::new(),
+ body: body.clone(),
+ drop_flags: DropFlags::default(),
+ },
+ Some(mut l) => {
+ l.drop_flags.clear();
+ l.body = body.clone();
+ l
}
- }
- let Some(terminator) = current_block.terminator.as_ref() else {
- not_supported!("block without terminator");
};
- match &terminator.kind {
- TerminatorKind::Goto { target } => {
- current_block_idx = *target;
- }
- TerminatorKind::Call {
- func,
- args,
- destination,
- target,
- cleanup: _,
- from_hir_call: _,
- } => {
- let destination_interval = self.place_interval(destination, &locals)?;
- let fn_ty = self.operand_ty(func, &locals)?;
- let args = args
- .iter()
- .map(|x| self.operand_ty_and_eval(x, &mut locals))
- .collect::<Result<Vec<_>>>()?;
- match &fn_ty.data(Interner).kind {
- TyKind::Function(_) => {
- let bytes = self.eval_operand(func, &mut locals)?;
- self.exec_fn_pointer(
- bytes,
- destination_interval,
- &args,
- &locals,
- terminator.span,
- )?;
- }
- TyKind::FnDef(def, generic_args) => {
- self.exec_fn_def(
- *def,
- generic_args,
- destination_interval,
- &args,
- &locals,
- terminator.span,
- )?;
- }
- x => not_supported!("unknown function type {x:?}"),
+ let stack_size = {
+ let mut stack_ptr = self.stack.len();
+ for (id, it) in body.locals.iter() {
+ if id == return_slot() {
+ if let Some(destination) = destination {
+ locals.ptr.insert(id, destination);
+ continue;
}
- locals.drop_flags.add_place(destination.clone());
- current_block_idx = target.expect("broken mir, function without target");
- }
- TerminatorKind::SwitchInt { discr, targets } => {
- let val = u128::from_le_bytes(pad16(
- self.eval_operand(discr, &mut locals)?.get(&self)?,
- false,
- ));
- current_block_idx = targets.target_for_value(val);
- }
- TerminatorKind::Return => {
- self.stack_depth_limit += 1;
- return Ok(locals.ptr[return_slot()].get(self)?.to_vec());
}
- TerminatorKind::Unreachable => {
- return Err(MirEvalError::UndefinedBehavior("unreachable executed".to_owned()));
- }
- TerminatorKind::Drop { place, target, unwind: _ } => {
- self.drop_place(place, &mut locals, terminator.span)?;
- current_block_idx = *target;
+ let (size, align) = self.size_align_of_sized(
+ &it.ty,
+ &locals,
+ "no unsized local in extending stack",
+ )?;
+ while stack_ptr % align != 0 {
+ stack_ptr += 1;
}
- _ => not_supported!("unknown terminator"),
+ let my_ptr = stack_ptr;
+ stack_ptr += size;
+ locals.ptr.insert(id, Interval { addr: Stack(my_ptr), size });
}
+ stack_ptr - self.stack.len()
+ };
+ let prev_stack_pointer = self.stack.len();
+ if stack_size > self.memory_limit {
+ return Err(MirEvalError::Panic(format!(
+ "Stack overflow. Tried to grow stack to {stack_size} bytes"
+ )));
}
+ self.stack.extend(iter::repeat(0).take(stack_size));
+ Ok((locals, prev_stack_pointer))
}
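
Two details of `create_locals_for_body` are worth calling out: locals are recycled through `unused_locals_store` instead of being rebuilt for every call, and each slot is padded until `stack_ptr` is a multiple of its alignment. A hedged sketch of the recycling side with a made-up `Pool` type (the real store is keyed by `DefWithBodyId`, not a string):

use std::cell::RefCell;
use std::collections::HashMap;

// A tiny per-key object pool: take() reuses a previously returned buffer if
// one is available, otherwise it falls back to a fresh allocation.
struct Pool {
    free: RefCell<HashMap<&'static str, Vec<Vec<u8>>>>,
}

impl Pool {
    fn take(&self, key: &'static str) -> Vec<u8> {
        self.free.borrow_mut().entry(key).or_default().pop().unwrap_or_default()
    }

    fn give_back(&self, key: &'static str, mut buf: Vec<u8>) {
        buf.clear(); // like DropFlags::clear(): reset the state, keep the allocation
        self.free.borrow_mut().entry(key).or_default().push(buf);
    }
}

fn main() {
    let pool = Pool { free: RefCell::new(HashMap::new()) };
    let mut a = pool.take("fn main");
    a.extend_from_slice(&[1, 2, 3]);
    let capacity = a.capacity();
    pool.give_back("fn main", a);
    let b = pool.take("fn main"); // the same allocation comes back, already cleared
    assert!(b.is_empty());
    assert_eq!(b.capacity(), capacity);
}
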
- fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals<'_>) -> Result<IntervalOrOwned> {
+ fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<IntervalOrOwned> {
use IntervalOrOwned::*;
Ok(match r {
- Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?),
+ Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
Rvalue::Ref(_, p) => {
let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
let mut r = addr.to_bytes();
@@ -881,9 +1085,9 @@ impl Evaluator<'_> {
c[0] = 1 - c[0];
} else {
match op {
- UnOp::Not => c.iter_mut().for_each(|x| *x = !*x),
+ UnOp::Not => c.iter_mut().for_each(|it| *it = !*it),
UnOp::Neg => {
- c.iter_mut().for_each(|x| *x = !*x);
+ c.iter_mut().for_each(|it| *it = !*it);
for k in c.iter_mut() {
let o;
(*k, o) = k.overflowing_add(1);
@@ -948,8 +1152,8 @@ impl Evaluator<'_> {
};
Owned(r.to_le_bytes().into())
}
- x => not_supported!(
- "invalid binop {x:?} on floating point operators"
+ it => not_supported!(
+ "invalid binop {it:?} on floating point operators"
),
}
}
@@ -976,8 +1180,8 @@ impl Evaluator<'_> {
};
Owned(r.to_le_bytes().into())
}
- x => not_supported!(
- "invalid binop {x:?} on floating point operators"
+ it => not_supported!(
+ "invalid binop {it:?} on floating point operators"
),
}
}
@@ -1034,13 +1238,18 @@ impl Evaluator<'_> {
BinOp::Shr => l128.checked_shr(shift_amount),
_ => unreachable!(),
};
+ if shift_amount as usize >= lc.len() * 8 {
+ return Err(MirEvalError::Panic(format!(
+ "Overflow in {op:?}"
+ )));
+ }
if let Some(r) = r {
break 'b r;
}
};
return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
};
- check_overflow(r)?
+ Owned(r.to_le_bytes()[..lc.len()].to_vec())
}
BinOp::Offset => not_supported!("offset binop"),
}
@@ -1049,64 +1258,15 @@ impl Evaluator<'_> {
Rvalue::Discriminant(p) => {
let ty = self.place_ty(p, locals)?;
let bytes = self.eval_place(p, locals)?.get(&self)?;
- let layout = self.layout(&ty)?;
- let enum_id = 'b: {
- match ty.kind(Interner) {
- TyKind::Adt(e, _) => match e.0 {
- AdtId::EnumId(e) => break 'b e,
- _ => (),
- },
- _ => (),
- }
- return Ok(Owned(0u128.to_le_bytes().to_vec()));
- };
- match &layout.variants {
- Variants::Single { index } => {
- let r = self.const_eval_discriminant(EnumVariantId {
- parent: enum_id,
- local_id: index.0,
- })?;
- Owned(r.to_le_bytes().to_vec())
- }
- Variants::Multiple { tag, tag_encoding, variants, .. } => {
- let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
- not_supported!("missing target data layout");
- };
- let size = tag.size(&*target_data_layout).bytes_usize();
- let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
- match tag_encoding {
- TagEncoding::Direct => {
- let tag = &bytes[offset..offset + size];
- Owned(pad16(tag, false).to_vec())
- }
- TagEncoding::Niche { untagged_variant, niche_start, .. } => {
- let tag = &bytes[offset..offset + size];
- let candidate_tag = i128::from_le_bytes(pad16(tag, false))
- .wrapping_sub(*niche_start as i128)
- as usize;
- let variant = variants
- .iter_enumerated()
- .map(|(x, _)| x)
- .filter(|x| x != untagged_variant)
- .nth(candidate_tag)
- .unwrap_or(*untagged_variant)
- .0;
- let result = self.const_eval_discriminant(EnumVariantId {
- parent: enum_id,
- local_id: variant,
- })?;
- Owned(result.to_le_bytes().to_vec())
- }
- }
- }
- }
+ let result = self.compute_discriminant(ty, bytes)?;
+ Owned(result.to_le_bytes().to_vec())
}
- Rvalue::Repeat(x, len) => {
+ Rvalue::Repeat(it, len) => {
let len = match try_const_usize(self.db, &len) {
- Some(x) => x as usize,
+ Some(it) => it as usize,
None => not_supported!("non evaluatable array len in repeat Rvalue"),
};
- let val = self.eval_operand(x, locals)?.get(self)?;
+ let val = self.eval_operand(it, locals)?.get(self)?;
let size = len * val.len();
Owned(val.iter().copied().cycle().take(size).collect())
}
@@ -1115,20 +1275,20 @@ impl Evaluator<'_> {
let Some((size, align)) = self.size_align_of(ty, locals)? else {
not_supported!("unsized box initialization");
};
- let addr = self.heap_allocate(size, align);
+ let addr = self.heap_allocate(size, align)?;
Owned(addr.to_bytes())
}
Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
Rvalue::Aggregate(kind, values) => {
let values = values
.iter()
- .map(|x| self.eval_operand(x, locals))
+ .map(|it| self.eval_operand(it, locals))
.collect::<Result<Vec<_>>>()?;
match kind {
AggregateKind::Array(_) => {
let mut r = vec![];
- for x in values {
- let value = x.get(&self)?;
+ for it in values {
+ let value = it.get(&self)?;
r.extend(value);
}
Owned(r)
@@ -1139,11 +1299,12 @@ impl Evaluator<'_> {
layout.size.bytes_usize(),
&layout,
None,
- values.iter().map(|&x| x.into()),
+ values.iter().map(|&it| it.into()),
)?)
}
- AggregateKind::Union(x, f) => {
- let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?;
+ AggregateKind::Union(it, f) => {
+ let layout =
+ self.layout_adt((*it).into(), Substitution::empty(Interner))?;
let offset = layout
.fields
.offset(u32::from(f.local_id.into_raw()) as usize)
@@ -1153,14 +1314,14 @@ impl Evaluator<'_> {
result[offset..offset + op.len()].copy_from_slice(op);
Owned(result)
}
- AggregateKind::Adt(x, subst) => {
+ AggregateKind::Adt(it, subst) => {
let (size, variant_layout, tag) =
- self.layout_of_variant(*x, subst.clone(), locals)?;
+ self.layout_of_variant(*it, subst.clone(), locals)?;
Owned(self.make_by_layout(
size,
&variant_layout,
tag,
- values.iter().map(|&x| x.into()),
+ values.iter().map(|&it| it.into()),
)?)
}
AggregateKind::Closure(ty) => {
@@ -1169,7 +1330,7 @@ impl Evaluator<'_> {
layout.size.bytes_usize(),
&layout,
None,
- values.iter().map(|&x| x.into()),
+ values.iter().map(|&it| it.into()),
)?)
}
}
@@ -1179,7 +1340,7 @@ impl Evaluator<'_> {
PointerCast::ReifyFnPointer | PointerCast::ClosureFnPointer(_) => {
let current_ty = self.operand_ty(operand, locals)?;
if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) =
- &current_ty.data(Interner).kind
+ &current_ty.kind(Interner)
{
let id = self.vtable_map.id(current_ty);
let ptr_size = self.ptr_size();
@@ -1229,21 +1390,75 @@ impl Evaluator<'_> {
})
}
+ fn compute_discriminant(&self, ty: Ty, bytes: &[u8]) -> Result<i128> {
+ let layout = self.layout(&ty)?;
+ let enum_id = 'b: {
+ match ty.kind(Interner) {
+ TyKind::Adt(e, _) => match e.0 {
+ AdtId::EnumId(e) => break 'b e,
+ _ => (),
+ },
+ _ => (),
+ }
+ return Ok(0);
+ };
+ match &layout.variants {
+ Variants::Single { index } => {
+ let r = self.const_eval_discriminant(EnumVariantId {
+ parent: enum_id,
+ local_id: index.0,
+ })?;
+ Ok(r)
+ }
+ Variants::Multiple { tag, tag_encoding, variants, .. } => {
+ let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
+ not_supported!("missing target data layout");
+ };
+ let size = tag.size(&*target_data_layout).bytes_usize();
+ let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field
+ match tag_encoding {
+ TagEncoding::Direct => {
+ let tag = &bytes[offset..offset + size];
+ Ok(i128::from_le_bytes(pad16(tag, false)))
+ }
+ TagEncoding::Niche { untagged_variant, niche_start, .. } => {
+ let tag = &bytes[offset..offset + size];
+ let candidate_tag = i128::from_le_bytes(pad16(tag, false))
+ .wrapping_sub(*niche_start as i128)
+ as usize;
+ let variant = variants
+ .iter_enumerated()
+ .map(|(it, _)| it)
+ .filter(|it| it != untagged_variant)
+ .nth(candidate_tag)
+ .unwrap_or(*untagged_variant)
+ .0;
+ let result = self.const_eval_discriminant(EnumVariantId {
+ parent: enum_id,
+ local_id: variant,
+ })?;
+ Ok(result)
+ }
+ }
+ }
+ }
+ }
+
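The niche branch of `compute_discriminant` above is easiest to follow with concrete numbers. A hedged example with made-up values: an enum whose variants are [A, B, C], where B is the untagged variant holding the niche and the niche range starts at tag value 251 in a one-byte tag.

fn decode_niche(tag: u8) -> &'static str {
    let untagged = "B";
    let tagged = ["A", "C"]; // every variant except the untagged one, in order
    let niche_start: i16 = 251;
    // Same arithmetic as above: subtract the niche start, index into the tagged
    // variants, and fall back to the untagged variant when out of range.
    let candidate = (tag as i16).wrapping_sub(niche_start) as usize;
    *tagged.get(candidate).unwrap_or(&untagged)
}

fn main() {
    assert_eq!(decode_niche(251), "A"); // candidate 0
    assert_eq!(decode_niche(252), "C"); // candidate 1
    assert_eq!(decode_niche(7), "B");   // outside the niche: the untagged variant
}
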
fn coerce_unsized_look_through_fields<T>(
&self,
ty: &Ty,
goal: impl Fn(&TyKind) -> Option<T>,
) -> Result<T> {
let kind = ty.kind(Interner);
- if let Some(x) = goal(kind) {
- return Ok(x);
+ if let Some(it) = goal(kind) {
+ return Ok(it);
}
if let TyKind::Adt(id, subst) = kind {
if let AdtId::StructId(struct_id) = id.0 {
let field_types = self.db.field_types(struct_id.into());
let mut field_types = field_types.iter();
if let Some(ty) =
- field_types.next().map(|x| x.1.clone().substitute(Interner, subst))
+ field_types.next().map(|it| it.1.clone().substitute(Interner, subst))
{
return self.coerce_unsized_look_through_fields(&ty, goal);
}
@@ -1258,66 +1473,99 @@ impl Evaluator<'_> {
current_ty: &Ty,
target_ty: &Ty,
) -> Result<IntervalOrOwned> {
- use IntervalOrOwned::*;
- fn for_ptr(x: &TyKind) -> Option<Ty> {
- match x {
+ fn for_ptr(it: &TyKind) -> Option<Ty> {
+ match it {
TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => Some(ty.clone()),
_ => None,
}
}
- Ok(match self.coerce_unsized_look_through_fields(target_ty, for_ptr)? {
- ty => match &ty.data(Interner).kind {
- TyKind::Slice(_) => {
- match self.coerce_unsized_look_through_fields(current_ty, for_ptr)? {
- ty => match &ty.data(Interner).kind {
- TyKind::Array(_, size) => {
- let len = match try_const_usize(self.db, size) {
- None => not_supported!(
- "unevaluatble len of array in coerce unsized"
- ),
- Some(x) => x as usize,
- };
- let mut r = Vec::with_capacity(16);
- let addr = addr.get(self)?;
- r.extend(addr.iter().copied());
- r.extend(len.to_le_bytes().into_iter());
- Owned(r)
- }
- t => {
- not_supported!("slice unsizing from non array type {t:?}")
- }
- },
- }
+ let target_ty = self.coerce_unsized_look_through_fields(target_ty, for_ptr)?;
+ let current_ty = self.coerce_unsized_look_through_fields(current_ty, for_ptr)?;
+
+ self.unsizing_ptr_from_addr(target_ty, current_ty, addr)
+ }
+
+ /// Adds metadata to the address and creates the fat pointer that results from the unsizing operation.
+ fn unsizing_ptr_from_addr(
+ &mut self,
+ target_ty: Ty,
+ current_ty: Ty,
+ addr: Interval,
+ ) -> Result<IntervalOrOwned> {
+ use IntervalOrOwned::*;
+ Ok(match &target_ty.kind(Interner) {
+ TyKind::Slice(_) => match &current_ty.kind(Interner) {
+ TyKind::Array(_, size) => {
+ let len = match try_const_usize(self.db, size) {
+ None => {
+ not_supported!("unevaluatable len of array in coerce unsized")
+ }
+ Some(it) => it as usize,
+ };
+ let mut r = Vec::with_capacity(16);
+ let addr = addr.get(self)?;
+ r.extend(addr.iter().copied());
+ r.extend(len.to_le_bytes().into_iter());
+ Owned(r)
}
- TyKind::Dyn(_) => match &current_ty.data(Interner).kind {
- TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => {
- let vtable = self.vtable_map.id(ty.clone());
- let mut r = Vec::with_capacity(16);
- let addr = addr.get(self)?;
- r.extend(addr.iter().copied());
- r.extend(vtable.to_le_bytes().into_iter());
- Owned(r)
+ t => {
+ not_supported!("slice unsizing from non array type {t:?}")
+ }
+ },
+ TyKind::Dyn(_) => {
+ let vtable = self.vtable_map.id(current_ty.clone());
+ let mut r = Vec::with_capacity(16);
+ let addr = addr.get(self)?;
+ r.extend(addr.iter().copied());
+ r.extend(vtable.to_le_bytes().into_iter());
+ Owned(r)
+ }
+ TyKind::Adt(id, target_subst) => match &current_ty.kind(Interner) {
+ TyKind::Adt(current_id, current_subst) => {
+ if id != current_id {
+ not_supported!("unsizing struct with different type");
}
- _ => not_supported!("dyn unsizing from non pointers"),
- },
- _ => not_supported!("unknown unsized cast"),
+ let id = match id.0 {
+ AdtId::StructId(s) => s,
+ AdtId::UnionId(_) => not_supported!("unsizing unions"),
+ AdtId::EnumId(_) => not_supported!("unsizing enums"),
+ };
+ let Some((last_field, _)) =
+ self.db.struct_data(id).variant_data.fields().iter().rev().next()
+ else {
+ not_supported!("unsizing struct without field");
+ };
+ let target_last_field = self.db.field_types(id.into())[last_field]
+ .clone()
+ .substitute(Interner, target_subst);
+ let current_last_field = self.db.field_types(id.into())[last_field]
+ .clone()
+ .substitute(Interner, current_subst);
+ return self.unsizing_ptr_from_addr(
+ target_last_field,
+ current_last_field,
+ addr,
+ );
+ }
+ _ => not_supported!("unsizing struct with non adt type"),
},
+ _ => not_supported!("unknown unsized cast"),
})
}
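
The byte layout assembled above is the usual fat-pointer shape; a hedged sketch assuming a 64-bit host: the data pointer fills the first eight bytes and the metadata (an array length for slices, a vtable id for `dyn Trait`) fills the second eight.

fn fat_pointer(data_addr: usize, metadata: usize) -> Vec<u8> {
    let mut bytes = Vec::with_capacity(16);
    bytes.extend(data_addr.to_le_bytes()); // first word: the data pointer
    bytes.extend(metadata.to_le_bytes());  // second word: length or vtable id
    bytes
}

fn main() {
    // Unsizing `[u8; 4]` to `[u8]`: the metadata is the element count.
    let fat = fat_pointer(0x1000, 4);
    assert_eq!(fat.len(), 16);
    assert_eq!(usize::from_le_bytes(fat[8..16].try_into().unwrap()), 4);
}
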
fn layout_of_variant(
&mut self,
- x: VariantId,
+ it: VariantId,
subst: Substitution,
- locals: &Locals<'_>,
+ locals: &Locals,
) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
- let adt = x.adt_id();
+ let adt = it.adt_id();
if let DefWithBodyId::VariantId(f) = locals.body.owner {
- if let VariantId::EnumVariantId(x) = x {
+ if let VariantId::EnumVariantId(it) = it {
if AdtId::from(f.parent) == adt {
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
// infinite sized type errors) we use a dummy layout
- let i = self.const_eval_discriminant(x)?;
+ let i = self.const_eval_discriminant(it)?;
return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
}
}
@@ -1330,8 +1578,8 @@ impl Evaluator<'_> {
.db
.target_data_layout(self.crate_id)
.ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
- let enum_variant_id = match x {
- VariantId::EnumVariantId(x) => x,
+ let enum_variant_id = match it {
+ VariantId::EnumVariantId(it) => it,
_ => not_supported!("multi variant layout for non-enums"),
};
let rustc_enum_variant_idx = RustcEnumVariantIdx(enum_variant_id.local_id);
@@ -1345,8 +1593,8 @@ impl Evaluator<'_> {
} else {
discriminant = (variants
.iter_enumerated()
- .filter(|(x, _)| x != untagged_variant)
- .position(|(x, _)| x == rustc_enum_variant_idx)
+ .filter(|(it, _)| it != untagged_variant)
+ .position(|(it, _)| it == rustc_enum_variant_idx)
.unwrap() as i128)
.wrapping_add(*niche_start as i128);
true
@@ -1379,18 +1627,24 @@ impl Evaluator<'_> {
) -> Result<Vec<u8>> {
let mut result = vec![0; size];
if let Some((offset, size, value)) = tag {
- result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]);
+ match result.get_mut(offset..offset + size) {
+ Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),
+ None => return Err(MirEvalError::BrokenLayout(variant_layout.clone())),
+ }
}
for (i, op) in values.enumerate() {
let offset = variant_layout.fields.offset(i).bytes_usize();
let op = op.get(&self)?;
- result[offset..offset + op.len()].copy_from_slice(op);
+ match result.get_mut(offset..offset + op.len()) {
+ Some(it) => it.copy_from_slice(op),
+ None => return Err(MirEvalError::BrokenLayout(variant_layout.clone())),
+ }
}
Ok(result)
}
- fn eval_operand(&mut self, x: &Operand, locals: &mut Locals<'_>) -> Result<Interval> {
- Ok(match x {
+ fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<Interval> {
+ Ok(match it {
Operand::Copy(p) | Operand::Move(p) => {
locals.drop_flags.remove_place(p);
self.eval_place(p, locals)?
@@ -1399,61 +1653,66 @@ impl Evaluator<'_> {
let addr = self.eval_static(*st, locals)?;
Interval::new(addr, self.ptr_size())
}
- Operand::Constant(konst) => {
- let data = &konst.data(Interner);
- match &data.value {
- chalk_ir::ConstValue::BoundVar(_) => not_supported!("bound var constant"),
- chalk_ir::ConstValue::InferenceVar(_) => {
- not_supported!("inference var constant")
- }
- chalk_ir::ConstValue::Placeholder(_) => not_supported!("placeholder constant"),
- chalk_ir::ConstValue::Concrete(c) => {
- self.allocate_const_in_heap(c, &data.ty, locals, konst)?
- }
- }
- }
+ Operand::Constant(konst) => self.allocate_const_in_heap(locals, konst)?,
})
}
- fn allocate_const_in_heap(
- &mut self,
- c: &chalk_ir::ConcreteConst<Interner>,
- ty: &Ty,
- locals: &Locals<'_>,
- konst: &chalk_ir::Const<Interner>,
- ) -> Result<Interval> {
- Ok(match &c.interned {
- ConstScalar::Bytes(v, memory_map) => {
- let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
- let patch_map = memory_map.transform_addresses(|b| {
- let addr = self.heap_allocate(b.len(), 1); // FIXME: align is wrong
- self.write_memory(addr, b)?;
- Ok(addr.to_usize())
- })?;
- let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
- if size != v.len() {
- // Handle self enum
- if size == 16 && v.len() < 16 {
- v = Cow::Owned(pad16(&v, false).to_vec());
- } else if size < 16 && v.len() == 16 {
- v = Cow::Owned(v[0..size].to_vec());
- } else {
- return Err(MirEvalError::InvalidConst(konst.clone()));
+ fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> {
+ let ty = &konst.data(Interner).ty;
+ let chalk_ir::ConstValue::Concrete(c) = &konst.data(Interner).value else {
+ not_supported!("evaluating non concrete constant");
+ };
+ let result_owner;
+ let (v, memory_map) = match &c.interned {
+ ConstScalar::Bytes(v, mm) => (v, mm),
+ ConstScalar::UnevaluatedConst(const_id, subst) => 'b: {
+ let mut const_id = *const_id;
+ let mut subst = subst.clone();
+ if let hir_def::GeneralConstId::ConstId(c) = const_id {
+ let (c, s) = lookup_impl_const(self.db, self.trait_env.clone(), c, subst);
+ const_id = hir_def::GeneralConstId::ConstId(c);
+ subst = s;
+ }
+ result_owner = self
+ .db
+ .const_eval(const_id.into(), subst, Some(self.trait_env.clone()))
+ .map_err(|e| {
+ let name = const_id.name(self.db.upcast());
+ MirEvalError::ConstEvalError(name, Box::new(e))
+ })?;
+ if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value {
+ if let ConstScalar::Bytes(v, mm) = &c.interned {
+ break 'b (v, mm);
}
}
- let addr = self.heap_allocate(size, align);
- self.write_memory(addr, &v)?;
- self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?;
- Interval::new(addr, size)
- }
- ConstScalar::UnevaluatedConst(..) => {
- not_supported!("unevaluated const present in monomorphized mir");
+ not_supported!("unevaluatable constant");
}
ConstScalar::Unknown => not_supported!("evaluating unknown const"),
- })
+ };
+ let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
+ let patch_map = memory_map.transform_addresses(|b, align| {
+ let addr = self.heap_allocate(b.len(), align)?;
+ self.write_memory(addr, b)?;
+ Ok(addr.to_usize())
+ })?;
+ let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1));
+ if size != v.len() {
+ // Handle self enum
+ if size == 16 && v.len() < 16 {
+ v = Cow::Owned(pad16(&v, false).to_vec());
+ } else if size < 16 && v.len() == 16 {
+ v = Cow::Owned(v[0..size].to_vec());
+ } else {
+ return Err(MirEvalError::InvalidConst(konst.clone()));
+ }
+ }
+ let addr = self.heap_allocate(size, align)?;
+ self.write_memory(addr, &v)?;
+ self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?;
+ Ok(Interval::new(addr, size))
}
- fn eval_place(&mut self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
+ fn eval_place(&mut self, p: &Place, locals: &Locals) -> Result<Interval> {
let addr = self.place_addr(p, locals)?;
Ok(Interval::new(
addr,
@@ -1466,11 +1725,11 @@ impl Evaluator<'_> {
return Ok(&[]);
}
let (mem, pos) = match addr {
- Stack(x) => (&self.stack, x),
- Heap(x) => (&self.heap, x),
- Invalid(x) => {
+ Stack(it) => (&self.stack, it),
+ Heap(it) => (&self.heap, it),
+ Invalid(it) => {
return Err(MirEvalError::UndefinedBehavior(format!(
- "read invalid memory address {x} with size {size}"
+ "read invalid memory address {it} with size {size}"
)));
}
};
@@ -1478,28 +1737,35 @@ impl Evaluator<'_> {
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_string()))
}
- fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
- if r.is_empty() {
- return Ok(());
- }
+ fn write_memory_using_ref(&mut self, addr: Address, size: usize) -> Result<&mut [u8]> {
let (mem, pos) = match addr {
- Stack(x) => (&mut self.stack, x),
- Heap(x) => (&mut self.heap, x),
- Invalid(x) => {
+ Stack(it) => (&mut self.stack, it),
+ Heap(it) => (&mut self.heap, it),
+ Invalid(it) => {
return Err(MirEvalError::UndefinedBehavior(format!(
- "write invalid memory address {x} with content {r:?}"
+ "write invalid memory address {it} with size {size}"
)));
}
};
- mem.get_mut(pos..pos + r.len())
- .ok_or_else(|| {
- MirEvalError::UndefinedBehavior("out of bound memory write".to_string())
- })?
- .copy_from_slice(r);
+ Ok(mem.get_mut(pos..pos + size).ok_or_else(|| {
+ MirEvalError::UndefinedBehavior("out of bound memory write".to_string())
+ })?)
+ }
+
+ fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
+ if r.is_empty() {
+ return Ok(());
+ }
+ self.write_memory_using_ref(addr, r.len())?.copy_from_slice(r);
Ok(())
}
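
Splitting `write_memory` into a borrow-returning `write_memory_using_ref` lets callers obtain a mutable window into guest memory without an intermediate copy. A rough sketch of the shape with simplified fields (a boolean selector stands in for the tagged `Address`):

struct Memory {
    stack: Vec<u8>,
    heap: Vec<u8>,
}

impl Memory {
    // Hand out a mutable window into whichever region the caller selects.
    fn window(&mut self, is_stack: bool, pos: usize, size: usize) -> Option<&mut [u8]> {
        let buf = if is_stack { &mut self.stack } else { &mut self.heap };
        buf.get_mut(pos..pos + size)
    }

    // write() is then just "get the window, copy into it".
    fn write(&mut self, is_stack: bool, pos: usize, bytes: &[u8]) -> Option<()> {
        self.window(is_stack, pos, bytes.len())?.copy_from_slice(bytes);
        Some(())
    }
}

fn main() {
    let mut mem = Memory { stack: vec![0; 8], heap: vec![0; 8] };
    mem.write(true, 2, &[1, 2, 3]).unwrap();
    assert_eq!(&mem.stack[2..5], &[1, 2, 3]);
}
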
- fn size_align_of(&self, ty: &Ty, locals: &Locals<'_>) -> Result<Option<(usize, usize)>> {
+ fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> {
+ if let Some(layout) = self.layout_cache.borrow().get(ty) {
+ return Ok(layout
+ .is_sized()
+ .then(|| (layout.size.bytes_usize(), layout.align.abi.bytes() as usize)));
+ }
if let DefWithBodyId::VariantId(f) = locals.body.owner {
if let Some((adt, _)) = ty.as_adt() {
if AdtId::from(f.parent) == adt {
@@ -1523,39 +1789,61 @@ impl Evaluator<'_> {
/// A version of `self.size_of` which returns error if the type is unsized. `what` argument should
/// be something that complete this: `error: type {ty} was unsized. {what} should be sized`
- fn size_of_sized(&self, ty: &Ty, locals: &Locals<'_>, what: &'static str) -> Result<usize> {
+ fn size_of_sized(&self, ty: &Ty, locals: &Locals, what: &'static str) -> Result<usize> {
match self.size_align_of(ty, locals)? {
- Some(x) => Ok(x.0),
+ Some(it) => Ok(it.0),
None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
}
}
- fn heap_allocate(&mut self, size: usize, _align: usize) -> Address {
+ /// A version of `self.size_align_of` which returns error if the type is unsized. `what` argument should
+ /// be something that complete this: `error: type {ty} was unsized. {what} should be sized`
+ fn size_align_of_sized(
+ &self,
+ ty: &Ty,
+ locals: &Locals,
+ what: &'static str,
+ ) -> Result<(usize, usize)> {
+ match self.size_align_of(ty, locals)? {
+ Some(it) => Ok(it),
+ None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
+ }
+ }
+
+ fn heap_allocate(&mut self, size: usize, align: usize) -> Result<Address> {
+ if !align.is_power_of_two() || align > 10000 {
+ return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid")));
+ }
+ while self.heap.len() % align != 0 {
+ self.heap.push(0);
+ }
+ if size.checked_add(self.heap.len()).map_or(true, |x| x > self.memory_limit) {
+ return Err(MirEvalError::Panic(format!("Memory allocation of {size} bytes failed")));
+ }
let pos = self.heap.len();
self.heap.extend(iter::repeat(0).take(size));
- Address::Heap(pos)
+ Ok(Address::Heap(pos))
}
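
A hedged sketch of the bump allocation above with a plain `Vec<u8>` as the heap: pad the buffer to the requested alignment, refuse requests that would exceed the memory limit, then hand out the current end of the buffer and grow it by `size` zero bytes (the real code also rejects absurdly large alignments, which is omitted here).

fn bump_allocate(heap: &mut Vec<u8>, size: usize, align: usize, limit: usize) -> Option<usize> {
    if !align.is_power_of_two() {
        return None; // the real code reports undefined behaviour here
    }
    // Pad until the end of the buffer satisfies the requested alignment.
    while heap.len() % align != 0 {
        heap.push(0);
    }
    // Refuse the allocation if it would push the heap past the memory limit.
    if size.checked_add(heap.len()).map_or(true, |total| total > limit) {
        return None; // the real evaluator turns this into a panic message
    }
    let pos = heap.len();
    heap.extend(std::iter::repeat(0).take(size));
    Some(pos)
}

fn main() {
    let mut heap = vec![0u8; 3];
    assert_eq!(bump_allocate(&mut heap, 8, 4, 1 << 20), Some(4)); // padded from 3 to 4
    assert_eq!(heap.len(), 12);
}
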
fn detect_fn_trait(&self, def: FunctionId) -> Option<FnTrait> {
- use LangItem::*;
- let ItemContainerId::TraitId(parent) = self.db.lookup_intern_function(def).container else {
- return None;
- };
- let l = lang_attr(self.db.upcast(), parent)?;
- match l {
- FnOnce => Some(FnTrait::FnOnce),
- FnMut => Some(FnTrait::FnMut),
- Fn => Some(FnTrait::Fn),
- _ => None,
+ let def = Some(def);
+ if def == self.cached_fn_trait_func {
+ Some(FnTrait::Fn)
+ } else if def == self.cached_fn_mut_trait_func {
+ Some(FnTrait::FnMut)
+ } else if def == self.cached_fn_once_trait_func {
+ Some(FnTrait::FnOnce)
+ } else {
+ None
}
}
- fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result<MemoryMap> {
+ fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals) -> Result<MemoryMap> {
fn rec(
this: &Evaluator<'_>,
bytes: &[u8],
ty: &Ty,
- locals: &Locals<'_>,
+ locals: &Locals,
mm: &mut MemoryMap,
) -> Result<()> {
match ty.kind(Interner) {
@@ -1602,6 +1890,17 @@ impl Evaluator<'_> {
}
}
}
+ chalk_ir::TyKind::Array(inner, len) => {
+ let len = match try_const_usize(this.db, &len) {
+ Some(it) => it as usize,
+ None => not_supported!("non evaluatable array len in patching addresses"),
+ };
+ let size = this.size_of_sized(inner, locals, "inner of array")?;
+ for i in 0..len {
+ let offset = i * size;
+ rec(this, &bytes[offset..offset + size], inner, locals, mm)?;
+ }
+ }
chalk_ir::TyKind::Tuple(_, subst) => {
let layout = this.layout(ty)?;
for (id, ty) in subst.iter(Interner).enumerate() {
@@ -1628,9 +1927,13 @@ impl Evaluator<'_> {
}
AdtId::EnumId(e) => {
let layout = this.layout(ty)?;
- if let Some((v, l)) =
- detect_variant_from_bytes(&layout, this.db, this.crate_id, bytes, e)
- {
+ if let Some((v, l)) = detect_variant_from_bytes(
+ &layout,
+ this.db,
+ this.trait_env.clone(),
+ bytes,
+ e,
+ ) {
let data = &this.db.enum_data(e).variants[v].variant_data;
let field_types = this
.db
@@ -1661,7 +1964,7 @@ impl Evaluator<'_> {
old_vtable: &VTableMap,
addr: Address,
ty: &Ty,
- locals: &Locals<'_>,
+ locals: &Locals,
) -> Result<()> {
// FIXME: support indirect references
let layout = self.layout(ty)?;
@@ -1672,14 +1975,14 @@ impl Evaluator<'_> {
match size {
Some(_) => {
let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
- if let Some(x) = patch_map.get(&current) {
- self.write_memory(addr, &x.to_le_bytes())?;
+ if let Some(it) = patch_map.get(&current) {
+ self.write_memory(addr, &it.to_le_bytes())?;
}
}
None => {
let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?);
- if let Some(x) = patch_map.get(&current) {
- self.write_memory(addr, &x.to_le_bytes())?;
+ if let Some(it) = patch_map.get(&current) {
+ self.write_memory(addr, &it.to_le_bytes())?;
}
}
}
@@ -1706,10 +2009,31 @@ impl Evaluator<'_> {
AdtId::UnionId(_) => (),
AdtId::EnumId(_) => (),
},
+ TyKind::Tuple(_, subst) => {
+ for (id, ty) in subst.iter(Interner).enumerate() {
+ let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
+ let offset = layout.fields.offset(id).bytes_usize();
+ self.patch_addresses(patch_map, old_vtable, addr.offset(offset), ty, locals)?;
+ }
+ }
+ TyKind::Array(inner, len) => {
+ let len = match try_const_usize(self.db, &len) {
+ Some(it) => it as usize,
+ None => not_supported!("non evaluatable array len in patching addresses"),
+ };
+ let size = self.size_of_sized(inner, locals, "inner of array")?;
+ for i in 0..len {
+ self.patch_addresses(
+ patch_map,
+ old_vtable,
+ addr.offset(i * size),
+ inner,
+ locals,
+ )?;
+ }
+ }
TyKind::AssociatedType(_, _)
| TyKind::Scalar(_)
- | TyKind::Tuple(_, _)
- | TyKind::Array(_, _)
| TyKind::Slice(_)
| TyKind::Raw(_, _)
| TyKind::OpaqueType(_, _)
@@ -1735,21 +2059,21 @@ impl Evaluator<'_> {
bytes: Interval,
destination: Interval,
args: &[IntervalAndTy],
- locals: &Locals<'_>,
+ locals: &Locals,
+ target_bb: Option<BasicBlockId>,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
let id = from_bytes!(usize, bytes.get(self)?);
let next_ty = self.vtable_map.ty(id)?.clone();
- match &next_ty.data(Interner).kind {
+ match &next_ty.kind(Interner) {
TyKind::FnDef(def, generic_args) => {
- self.exec_fn_def(*def, generic_args, destination, args, &locals, span)?;
+ self.exec_fn_def(*def, generic_args, destination, args, &locals, target_bb, span)
}
TyKind::Closure(id, subst) => {
- self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)?;
+ self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)
}
- _ => return Err(MirEvalError::TypeError("function pointer to non function")),
+ _ => Err(MirEvalError::TypeError("function pointer to non function")),
}
- Ok(())
}
fn exec_closure(
@@ -1759,9 +2083,9 @@ impl Evaluator<'_> {
generic_args: &Substitution,
destination: Interval,
args: &[IntervalAndTy],
- locals: &Locals<'_>,
+ locals: &Locals,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
let mir_body = self
.db
.monomorphized_mir_body_for_closure(
@@ -1769,7 +2093,7 @@ impl Evaluator<'_> {
generic_args.clone(),
self.trait_env.clone(),
)
- .map_err(|x| MirEvalError::MirLowerErrorForClosure(closure, x))?;
+ .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?;
let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
{
closure_data.addr.to_bytes()
@@ -1777,12 +2101,18 @@ impl Evaluator<'_> {
closure_data.get(self)?.to_owned()
};
let arg_bytes = iter::once(Ok(closure_data))
- .chain(args.iter().map(|x| Ok(x.get(&self)?.to_owned())))
+ .chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned())))
.collect::<Result<Vec<_>>>()?;
- let bytes = self.interpret_mir(&mir_body, arg_bytes.into_iter()).map_err(|e| {
- MirEvalError::InFunction(Either::Right(closure), Box::new(e), span, locals.body.owner)
- })?;
- destination.write_from_bytes(self, &bytes)
+ let bytes = self
+ .interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned))
+ .map_err(|e| {
+ MirEvalError::InFunction(
+ Box::new(e),
+ vec![(Either::Right(closure), span, locals.body.owner)],
+ )
+ })?;
+ destination.write_from_bytes(self, &bytes)?;
+ Ok(None)
}
fn exec_fn_def(
@@ -1791,18 +2121,34 @@ impl Evaluator<'_> {
generic_args: &Substitution,
destination: Interval,
args: &[IntervalAndTy],
- locals: &Locals<'_>,
+ locals: &Locals,
+ target_bb: Option<BasicBlockId>,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
let def: CallableDefId = from_chalk(self.db, def);
let generic_args = generic_args.clone();
match def {
CallableDefId::FunctionId(def) => {
if let Some(_) = self.detect_fn_trait(def) {
- self.exec_fn_trait(&args, destination, locals, span)?;
- return Ok(());
+ return self.exec_fn_trait(
+ def,
+ args,
+ generic_args,
+ locals,
+ destination,
+ target_bb,
+ span,
+ );
}
- self.exec_fn_with_args(def, args, generic_args, locals, destination, span)?;
+ self.exec_fn_with_args(
+ def,
+ args,
+ generic_args,
+ locals,
+ destination,
+ target_bb,
+ span,
+ )
}
CallableDefId::StructId(id) => {
let (size, variant_layout, tag) =
@@ -1811,9 +2157,10 @@ impl Evaluator<'_> {
size,
&variant_layout,
tag,
- args.iter().map(|x| x.interval.into()),
+ args.iter().map(|it| it.interval.into()),
)?;
destination.write_from_bytes(self, &result)?;
+ Ok(None)
}
CallableDefId::EnumVariantId(id) => {
let (size, variant_layout, tag) =
@@ -1822,12 +2169,46 @@ impl Evaluator<'_> {
size,
&variant_layout,
tag,
- args.iter().map(|x| x.interval.into()),
+ args.iter().map(|it| it.interval.into()),
)?;
destination.write_from_bytes(self, &result)?;
+ Ok(None)
}
}
- Ok(())
+ }
+
+ fn get_mir_or_dyn_index(
+ &self,
+ def: FunctionId,
+ generic_args: Substitution,
+ locals: &Locals,
+ span: MirSpan,
+ ) -> Result<MirOrDynIndex> {
+ let pair = (def, generic_args);
+ if let Some(r) = self.mir_or_dyn_index_cache.borrow().get(&pair) {
+ return Ok(r.clone());
+ }
+ let (def, generic_args) = pair;
+ let r = if let Some(self_ty_idx) =
+ is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
+ {
+ MirOrDynIndex::Dyn(self_ty_idx)
+ } else {
+ let (imp, generic_args) =
+ self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args.clone());
+ let mir_body = self
+ .db
+ .monomorphized_mir_body(imp.into(), generic_args, self.trait_env.clone())
+ .map_err(|e| {
+ MirEvalError::InFunction(
+ Box::new(MirEvalError::MirLowerError(imp, e)),
+ vec![(Either::Left(imp), span, locals.body.owner)],
+ )
+ })?;
+ MirOrDynIndex::Mir(mir_body)
+ };
+ self.mir_or_dyn_index_cache.borrow_mut().insert((def, generic_args), r.clone());
+ Ok(r)
}
fn exec_fn_with_args(
@@ -1835,10 +2216,11 @@ impl Evaluator<'_> {
def: FunctionId,
args: &[IntervalAndTy],
generic_args: Substitution,
- locals: &Locals<'_>,
+ locals: &Locals,
destination: Interval,
+ target_bb: Option<BasicBlockId>,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
if self.detect_and_exec_special_function(
def,
args,
@@ -1847,85 +2229,96 @@ impl Evaluator<'_> {
destination,
span,
)? {
- return Ok(());
+ return Ok(None);
}
- let arg_bytes =
- args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
- if let Some(self_ty_idx) =
- is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
- {
- // In the layout of current possible receiver, which at the moment of writing this code is one of
- // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible recievers,
- // the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
- // the type.
- let ty =
- self.vtable_map.ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?;
- let mut args_for_target = args.to_vec();
- args_for_target[0] = IntervalAndTy {
- interval: args_for_target[0].interval.slice(0..self.ptr_size()),
- ty: ty.clone(),
- };
- let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
- let generics_for_target =
- Substitution::from_iter(
+ let arg_bytes = args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval));
+ match self.get_mir_or_dyn_index(def, generic_args.clone(), locals, span)? {
+ MirOrDynIndex::Dyn(self_ty_idx) => {
+ // In the layout of the currently possible receivers, which at the time of writing are one of
+ // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of the possible receivers,
+ // the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
+ // the type.
+ let first_arg = arg_bytes.clone().next().unwrap();
+ let first_arg = first_arg.get(self)?;
+ let ty = self
+ .vtable_map
+ .ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?;
+ let mut args_for_target = args.to_vec();
+ args_for_target[0] = IntervalAndTy {
+ interval: args_for_target[0].interval.slice(0..self.ptr_size()),
+ ty: ty.clone(),
+ };
+ let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
+ let generics_for_target = Substitution::from_iter(
Interner,
- generic_args.iter(Interner).enumerate().map(|(i, x)| {
+ generic_args.iter(Interner).enumerate().map(|(i, it)| {
if i == self_ty_idx {
&ty
} else {
- x
+ it
}
}),
);
- return self.exec_fn_with_args(
- def,
- &args_for_target,
- generics_for_target,
+ return self.exec_fn_with_args(
+ def,
+ &args_for_target,
+ generics_for_target,
+ locals,
+ destination,
+ target_bb,
+ span,
+ );
+ }
+ MirOrDynIndex::Mir(body) => self.exec_looked_up_function(
+ body,
locals,
- destination,
+ def,
+ arg_bytes,
span,
- );
+ destination,
+ target_bb,
+ ),
}
- let (imp, generic_args) =
- lookup_impl_method(self.db, self.trait_env.clone(), def, generic_args);
- self.exec_looked_up_function(generic_args, locals, imp, arg_bytes, span, destination)
}
fn exec_looked_up_function(
&mut self,
- generic_args: Substitution,
- locals: &Locals<'_>,
- imp: FunctionId,
- arg_bytes: Vec<Vec<u8>>,
+ mir_body: Arc<MirBody>,
+ locals: &Locals,
+ def: FunctionId,
+ arg_bytes: impl Iterator<Item = IntervalOrOwned>,
span: MirSpan,
destination: Interval,
- ) -> Result<()> {
- let def = imp.into();
- let mir_body = self
- .db
- .monomorphized_mir_body(def, generic_args, self.trait_env.clone())
- .map_err(|e| {
+ target_bb: Option<BasicBlockId>,
+ ) -> Result<Option<StackFrame>> {
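+ // If `target_bb` is given, don't interpret recursively: set up the callee's locals and
+ // return a `StackFrame` so the caller's interpreter loop continues there. Otherwise
+ // interpret the body here and write the result into `destination`.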
+ Ok(if let Some(target_bb) = target_bb {
+ let (mut locals, prev_stack_ptr) =
+ self.create_locals_for_body(&mir_body, Some(destination))?;
+ self.fill_locals_for_body(&mir_body, &mut locals, arg_bytes.into_iter())?;
+ let span = (span, locals.body.owner);
+ Some(StackFrame { locals, destination: Some(target_bb), prev_stack_ptr, span })
+ } else {
+ let result = self.interpret_mir(mir_body, arg_bytes).map_err(|e| {
MirEvalError::InFunction(
- Either::Left(imp),
- Box::new(MirEvalError::MirLowerError(imp, e)),
- span,
- locals.body.owner,
+ Box::new(e),
+ vec![(Either::Left(def), span, locals.body.owner)],
)
})?;
- let result = self.interpret_mir(&mir_body, arg_bytes.iter().cloned()).map_err(|e| {
- MirEvalError::InFunction(Either::Left(imp), Box::new(e), span, locals.body.owner)
- })?;
- destination.write_from_bytes(self, &result)?;
- Ok(())
+ destination.write_from_bytes(self, &result)?;
+ None
+ })
}
fn exec_fn_trait(
&mut self,
+ def: FunctionId,
args: &[IntervalAndTy],
+ generic_args: Substitution,
+ locals: &Locals,
destination: Interval,
- locals: &Locals<'_>,
+ target_bb: Option<BasicBlockId>,
span: MirSpan,
- ) -> Result<()> {
+ ) -> Result<Option<StackFrame>> {
let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
let mut func_ty = func.ty.clone();
let mut func_data = func.interval;
@@ -1940,15 +2333,30 @@ impl Evaluator<'_> {
let size = self.size_of_sized(&func_ty, locals, "self type of fn trait")?;
func_data = Interval { addr: Address::from_bytes(func_data.get(self)?)?, size };
}
- match &func_ty.data(Interner).kind {
+ match &func_ty.kind(Interner) {
TyKind::FnDef(def, subst) => {
- self.exec_fn_def(*def, subst, destination, &args[1..], locals, span)?;
+ return self.exec_fn_def(
+ *def,
+ subst,
+ destination,
+ &args[1..],
+ locals,
+ target_bb,
+ span,
+ );
}
TyKind::Function(_) => {
- self.exec_fn_pointer(func_data, destination, &args[1..], locals, span)?;
+ return self.exec_fn_pointer(
+ func_data,
+ destination,
+ &args[1..],
+ locals,
+ target_bb,
+ span,
+ );
}
TyKind::Closure(closure, subst) => {
- self.exec_closure(
+ return self.exec_closure(
*closure,
func_data,
&Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()),
@@ -1956,14 +2364,45 @@ impl Evaluator<'_> {
&args[1..],
locals,
span,
- )?;
+ );
+ }
+ _ => {
+ // try to execute the manual impl of `FnTrait` for structs (nightly feature used in std)
+ let arg0 = func;
+ let args = &args[1..];
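+ // Pack the remaining call arguments into a tuple, matching the `call*(self, args)`
+ // signature of the `Fn*` traits.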
+ let arg1 = {
+ let ty = TyKind::Tuple(
+ args.len(),
+ Substitution::from_iter(Interner, args.iter().map(|it| it.ty.clone())),
+ )
+ .intern(Interner);
+ let layout = self.layout(&ty)?;
+ let result = self.make_by_layout(
+ layout.size.bytes_usize(),
+ &layout,
+ None,
+ args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval)),
+ )?;
+ // FIXME: there is some leak here
+ let size = layout.size.bytes_usize();
+ let addr = self.heap_allocate(size, layout.align.abi.bytes() as usize)?;
+ self.write_memory(addr, &result)?;
+ IntervalAndTy { interval: Interval { addr, size }, ty }
+ };
+ return self.exec_fn_with_args(
+ def,
+ &[arg0.clone(), arg1],
+ generic_args,
+ locals,
+ destination,
+ target_bb,
+ span,
+ );
}
- x => not_supported!("Call FnTrait methods with type {x:?}"),
}
- Ok(())
}
- fn eval_static(&mut self, st: StaticId, locals: &Locals<'_>) -> Result<Address> {
+ fn eval_static(&mut self, st: StaticId, locals: &Locals) -> Result<Address> {
if let Some(o) = self.static_locations.get(&st) {
return Ok(*o);
};
@@ -1975,21 +2414,16 @@ impl Evaluator<'_> {
Box::new(e),
)
})?;
- let data = &konst.data(Interner);
- if let chalk_ir::ConstValue::Concrete(c) = &data.value {
- self.allocate_const_in_heap(&c, &data.ty, locals, &konst)?
- } else {
- not_supported!("unevaluatable static");
- }
+ self.allocate_const_in_heap(locals, &konst)?
} else {
let ty = &self.db.infer(st.into())[self.db.body(st.into()).body_expr];
let Some((size, align)) = self.size_align_of(&ty, locals)? else {
not_supported!("unsized extern static");
};
- let addr = self.heap_allocate(size, align);
+ let addr = self.heap_allocate(size, align)?;
Interval::new(addr, size)
};
- let addr = self.heap_allocate(self.ptr_size(), self.ptr_size());
+ let addr = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
self.write_memory(addr, &result.addr.to_bytes())?;
self.static_locations.insert(st, addr);
Ok(addr)
@@ -2011,13 +2445,13 @@ impl Evaluator<'_> {
}
}
- fn drop_place(&mut self, place: &Place, locals: &mut Locals<'_>, span: MirSpan) -> Result<()> {
+ fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<()> {
let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?;
if !locals.drop_flags.remove_place(place) {
return Ok(());
}
let metadata = match metadata {
- Some(x) => x.get(self)?.to_vec(),
+ Some(it) => it.get(self)?.to_vec(),
None => vec![],
};
self.run_drop_glue_deep(ty, locals, addr, &metadata, span)
@@ -2026,7 +2460,7 @@ impl Evaluator<'_> {
fn run_drop_glue_deep(
&mut self,
ty: Ty,
- locals: &Locals<'_>,
+ locals: &Locals,
addr: Address,
_metadata: &[u8],
span: MirSpan,
@@ -2039,20 +2473,19 @@ impl Evaluator<'_> {
// we can ignore drop in them.
return Ok(());
};
- let (impl_drop_candidate, subst) = lookup_impl_method(
- self.db,
- self.trait_env.clone(),
- drop_fn,
- Substitution::from1(Interner, ty.clone()),
- );
- if impl_drop_candidate != drop_fn {
+
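+ // Run the type's own `Drop::drop` impl first, if its MIR can be resolved.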
+ let generic_args = Substitution::from1(Interner, ty.clone());
+ if let Ok(MirOrDynIndex::Mir(body)) =
+ self.get_mir_or_dyn_index(drop_fn, generic_args, locals, span)
+ {
self.exec_looked_up_function(
- subst,
+ body,
locals,
- impl_drop_candidate,
- vec![addr.to_bytes()],
+ drop_fn,
+ [IntervalOrOwned::Owned(addr.to_bytes())].into_iter(),
span,
Interval { addr: Address::Invalid(0), size: 0 },
+ None,
)?;
}
match ty.kind(Interner) {
@@ -2121,10 +2554,77 @@ impl Evaluator<'_> {
}
}
-pub fn pad16(x: &[u8], is_signed: bool) -> [u8; 16] {
- let is_negative = is_signed && x.last().unwrap_or(&0) > &128;
+pub fn render_const_using_debug_impl(
+ db: &dyn HirDatabase,
+ owner: ConstId,
+ c: &Const,
+) -> Result<String> {
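+ // Renders the constant by building a `core::fmt::Arguments` value directly in the
+ // interpreter's memory and evaluating `std::fmt::format` on it, formatting the value
+ // with its `Debug` impl.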
+ let mut evaluator = Evaluator::new(db, owner.into(), false, None);
+ let locals = &Locals {
+ ptr: ArenaMap::new(),
+ body: db
+ .mir_body(owner.into())
+ .map_err(|_| MirEvalError::NotSupported("unreachable".to_string()))?,
+ drop_flags: DropFlags::default(),
+ };
+ let data = evaluator.allocate_const_in_heap(locals, c)?;
+ let resolver = owner.resolver(db.upcast());
+ let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully(
+ db.upcast(),
+ &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
+ hir_expand::mod_path::PathKind::Abs,
+ [name![core], name![fmt], name![Debug]].into_iter(),
+ )),
+ ) else {
+ not_supported!("core::fmt::Debug not found");
+ };
+ let Some(debug_fmt_fn) = db.trait_data(debug_trait).method_by_name(&name![fmt]) else {
+ not_supported!("core::fmt::Debug::fmt not found");
+ };
+ // a1 = &[""]
+ let a1 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
+ // a2 = &[::core::fmt::ArgumentV1::new(&(THE_CONST), ::core::fmt::Debug::fmt)]
+ // FIXME: we should call that function, but its name will change in the next rustc version
+ // (while its ABI stays the same), so we construct the value in memory manually.
+ let a2 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?;
+ evaluator.write_memory(a2, &data.addr.to_bytes())?;
+ let debug_fmt_fn_ptr = evaluator.vtable_map.id(TyKind::FnDef(
+ db.intern_callable_def(debug_fmt_fn.into()).into(),
+ Substitution::from1(Interner, c.data(Interner).ty.clone()),
+ )
+ .intern(Interner));
+ evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?;
+ // a3 = ::core::fmt::Arguments::new_v1(a1, a2)
+ // FIXME: similarly, we should call the function here instead of writing to memory directly.
+ let a3 = evaluator.heap_allocate(evaluator.ptr_size() * 6, evaluator.ptr_size())?;
+ evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a1.to_bytes())?;
+ evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?;
+ evaluator.write_memory(a3.offset(4 * evaluator.ptr_size()), &a2.to_bytes())?;
+ evaluator.write_memory(a3.offset(5 * evaluator.ptr_size()), &[1])?;
+ let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully(
+ db.upcast(),
+ &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
+ hir_expand::mod_path::PathKind::Abs,
+ [name![std], name![fmt], name![format]].into_iter(),
+ )),
+ ) else {
+ not_supported!("std::fmt::format not found");
+ };
+ let message_string = evaluator.interpret_mir(
+ db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
+ [IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })]
+ .into_iter(),
+ )?;
+ let addr =
+ Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?;
+ let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]);
+ Ok(std::string::String::from_utf8_lossy(evaluator.read_memory(addr, size)?).into_owned())
+}
+
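+/// Widens a little-endian integer to 16 bytes, sign-extending it when `is_signed` is set.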
+pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] {
+ let is_negative = is_signed && it.last().unwrap_or(&0) > &127;
let fill_with = if is_negative { 255 } else { 0 };
- x.iter()
+ it.iter()
.copied()
.chain(iter::repeat(fill_with))
.take(16)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
index 3b9ef03c3..b2e29fd34 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs
@@ -3,20 +3,26 @@
use std::cmp;
+use chalk_ir::TyKind;
+use hir_def::resolver::HasResolver;
+use hir_expand::mod_path::ModPath;
+
use super::*;
+mod simd;
+
macro_rules! from_bytes {
($ty:tt, $value:expr) => {
($ty::from_le_bytes(match ($value).try_into() {
- Ok(x) => x,
+ Ok(it) => it,
Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
}))
};
}
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirEvalError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirEvalError::NotSupported(format!($it)))
};
}
@@ -26,10 +32,13 @@ impl Evaluator<'_> {
def: FunctionId,
args: &[IntervalAndTy],
generic_args: &Substitution,
- locals: &Locals<'_>,
+ locals: &Locals,
destination: Interval,
span: MirSpan,
) -> Result<bool> {
+ if self.not_special_fn_cache.borrow().contains(&def) {
+ return Ok(false);
+ }
let function_data = self.db.function_data(def);
let is_intrinsic = match &function_data.abi {
Some(abi) => *abi == Interned::new_str("rust-intrinsic"),
@@ -53,6 +62,28 @@ impl Evaluator<'_> {
)?;
return Ok(true);
}
+ let is_platform_intrinsic = match &function_data.abi {
+ Some(abi) => *abi == Interned::new_str("platform-intrinsic"),
+ None => match def.lookup(self.db.upcast()).container {
+ hir_def::ItemContainerId::ExternBlockId(block) => {
+ let id = block.lookup(self.db.upcast()).id;
+ id.item_tree(self.db.upcast())[id.value].abi.as_deref()
+ == Some("platform-intrinsic")
+ }
+ _ => false,
+ },
+ };
+ if is_platform_intrinsic {
+ self.exec_platform_intrinsic(
+ function_data.name.as_text().unwrap_or_default().as_str(),
+ args,
+ generic_args,
+ destination,
+ &locals,
+ span,
+ )?;
+ return Ok(true);
+ }
let is_extern_c = match def.lookup(self.db.upcast()).container {
hir_def::ItemContainerId::ExternBlockId(block) => {
let id = block.lookup(self.db.upcast()).id;
@@ -74,31 +105,110 @@ impl Evaluator<'_> {
let alloc_fn = function_data
.attrs
.iter()
- .filter_map(|x| x.path().as_ident())
- .filter_map(|x| x.as_str())
- .find(|x| {
+ .filter_map(|it| it.path().as_ident())
+ .filter_map(|it| it.as_str())
+ .find(|it| {
[
"rustc_allocator",
"rustc_deallocator",
"rustc_reallocator",
"rustc_allocator_zeroed",
]
- .contains(x)
+ .contains(it)
});
if let Some(alloc_fn) = alloc_fn {
self.exec_alloc_fn(alloc_fn, args, destination)?;
return Ok(true);
}
- if let Some(x) = self.detect_lang_function(def) {
+ if let Some(it) = self.detect_lang_function(def) {
let arg_bytes =
- args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
- let result = self.exec_lang_item(x, generic_args, &arg_bytes, locals, span)?;
+ args.iter().map(|it| Ok(it.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
+ let result = self.exec_lang_item(it, generic_args, &arg_bytes, locals, span)?;
destination.write_from_bytes(self, &result)?;
return Ok(true);
}
+ if let ItemContainerId::TraitId(t) = def.lookup(self.db.upcast()).container {
+ if self.db.lang_attr(t.into()) == Some(LangItem::Clone) {
+ let [self_ty] = generic_args.as_slice(Interner) else {
+ not_supported!("wrong generic arg count for clone");
+ };
+ let Some(self_ty) = self_ty.ty(Interner) else {
+ not_supported!("wrong generic arg kind for clone");
+ };
+ // Clone has special impls for tuples and function pointers
+ if matches!(self_ty.kind(Interner), TyKind::Function(_) | TyKind::Tuple(..)) {
+ self.exec_clone(def, args, self_ty.clone(), locals, destination, span)?;
+ return Ok(true);
+ }
+ // Return early to prevent caching clone as a non-special fn.
+ return Ok(false);
+ }
+ }
+ self.not_special_fn_cache.borrow_mut().insert(def);
Ok(false)
}
+ /// Clone has special impls for tuples and function pointers
+ fn exec_clone(
+ &mut self,
+ def: FunctionId,
+ args: &[IntervalAndTy],
+ self_ty: Ty,
+ locals: &Locals,
+ destination: Interval,
+ span: MirSpan,
+ ) -> Result<()> {
+ match self_ty.kind(Interner) {
+ TyKind::Function(_) => {
+ let [arg] = args else {
+ not_supported!("wrong arg count for clone");
+ };
+ let addr = Address::from_bytes(arg.get(self)?)?;
+ return destination
+ .write_from_interval(self, Interval { addr, size: destination.size });
+ }
+ TyKind::Tuple(_, subst) => {
+ let [arg] = args else {
+ not_supported!("wrong arg count for clone");
+ };
+ let addr = Address::from_bytes(arg.get(self)?)?;
+ let layout = self.layout(&self_ty)?;
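+ // Clone each tuple field separately: pass a temporary `&field` as the argument and
+ // write the result into the corresponding slice of `destination`.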
+ for (i, ty) in subst.iter(Interner).enumerate() {
+ let ty = ty.assert_ty_ref(Interner);
+ let size = self.layout(ty)?.size.bytes_usize();
+ let tmp = self.heap_allocate(self.ptr_size(), self.ptr_size())?;
+ let arg = IntervalAndTy {
+ interval: Interval { addr: tmp, size: self.ptr_size() },
+ ty: TyKind::Ref(Mutability::Not, static_lifetime(), ty.clone())
+ .intern(Interner),
+ };
+ let offset = layout.fields.offset(i).bytes_usize();
+ self.write_memory(tmp, &addr.offset(offset).to_bytes())?;
+ self.exec_clone(
+ def,
+ &[arg],
+ ty.clone(),
+ locals,
+ destination.slice(offset..offset + size),
+ span,
+ )?;
+ }
+ }
+ _ => {
+ self.exec_fn_with_args(
+ def,
+ args,
+ Substitution::from1(Interner, self_ty),
+ locals,
+ destination,
+ None,
+ span,
+ )?;
+ }
+ }
+ Ok(())
+ }
+
fn exec_alloc_fn(
&mut self,
alloc_fn: &str,
@@ -112,7 +222,7 @@ impl Evaluator<'_> {
};
let size = from_bytes!(usize, size.get(self)?);
let align = from_bytes!(usize, align.get(self)?);
- let result = self.heap_allocate(size, align);
+ let result = self.heap_allocate(size, align)?;
destination.write_from_bytes(self, &result.to_bytes())?;
}
"rustc_deallocator" => { /* no-op for now */ }
@@ -120,14 +230,18 @@ impl Evaluator<'_> {
let [ptr, old_size, align, new_size] = args else {
return Err(MirEvalError::TypeError("rustc_allocator args are not provided"));
};
- let ptr = Address::from_bytes(ptr.get(self)?)?;
let old_size = from_bytes!(usize, old_size.get(self)?);
let new_size = from_bytes!(usize, new_size.get(self)?);
- let align = from_bytes!(usize, align.get(self)?);
- let result = self.heap_allocate(new_size, align);
- Interval { addr: result, size: old_size }
- .write_from_interval(self, Interval { addr: ptr, size: old_size })?;
- destination.write_from_bytes(self, &result.to_bytes())?;
+ if old_size >= new_size {
+ destination.write_from_interval(self, ptr.interval)?;
+ } else {
+ let ptr = Address::from_bytes(ptr.get(self)?)?;
+ let align = from_bytes!(usize, align.get(self)?);
+ let result = self.heap_allocate(new_size, align)?;
+ Interval { addr: result, size: old_size }
+ .write_from_interval(self, Interval { addr: ptr, size: old_size })?;
+ destination.write_from_bytes(self, &result.to_bytes())?;
+ }
}
_ => not_supported!("unknown alloc function"),
}
@@ -136,7 +250,7 @@ impl Evaluator<'_> {
fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
use LangItem::*;
- let candidate = lang_attr(self.db.upcast(), def)?;
+ let candidate = self.db.lang_attr(def.into())?;
// We want to execute these functions with special logic
if [PanicFmt, BeginPanic, SliceLen, DropInPlace].contains(&candidate) {
return Some(candidate);
@@ -146,56 +260,35 @@ impl Evaluator<'_> {
fn exec_lang_item(
&mut self,
- x: LangItem,
+ it: LangItem,
generic_args: &Substitution,
args: &[Vec<u8>],
- locals: &Locals<'_>,
+ locals: &Locals,
span: MirSpan,
) -> Result<Vec<u8>> {
use LangItem::*;
let mut args = args.iter();
- match x {
+ match it {
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())),
PanicFmt => {
let message = (|| {
- let arguments_struct =
- self.db.lang_item(self.crate_id, LangItem::FormatArguments)?.as_struct()?;
- let arguments_layout = self
- .layout_adt(arguments_struct.into(), Substitution::empty(Interner))
- .ok()?;
- let arguments_field_pieces =
- self.db.struct_data(arguments_struct).variant_data.field(&name![pieces])?;
- let pieces_offset = arguments_layout
- .fields
- .offset(u32::from(arguments_field_pieces.into_raw()) as usize)
- .bytes_usize();
- let ptr_size = self.ptr_size();
- let arg = args.next()?;
- let pieces_array_addr =
- Address::from_bytes(&arg[pieces_offset..pieces_offset + ptr_size]).ok()?;
- let pieces_array_len = usize::from_le_bytes(
- (&arg[pieces_offset + ptr_size..pieces_offset + 2 * ptr_size])
- .try_into()
- .ok()?,
- );
- let mut message = "".to_string();
- for i in 0..pieces_array_len {
- let piece_ptr_addr = pieces_array_addr.offset(2 * i * ptr_size);
- let piece_addr =
- Address::from_bytes(self.read_memory(piece_ptr_addr, ptr_size).ok()?)
- .ok()?;
- let piece_len = usize::from_le_bytes(
- self.read_memory(piece_ptr_addr.offset(ptr_size), ptr_size)
- .ok()?
- .try_into()
- .ok()?,
- );
- let piece_data = self.read_memory(piece_addr, piece_len).ok()?;
- message += &std::string::String::from_utf8_lossy(piece_data);
- }
- Some(message)
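+ // Render the panic message by resolving `std::fmt::format` and interpreting its
+ // MIR body with the `format_args!` payload as the argument.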
+ let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast());
+ let Some(format_fn) = resolver.resolve_path_in_value_ns_fully(
+ self.db.upcast(),
+ &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments(
+ hir_expand::mod_path::PathKind::Abs,
+ [name![std], name![fmt], name![format]].into_iter(),
+ )),
+ ) else {
+ not_supported!("std::fmt::format not found");
+ };
+ let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else {
+ not_supported!("std::fmt::format is not a function")
+ };
+ let message_string = self.interpret_mir(
+ self.db
+ .mir_body(format_fn.into())
+ .map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
+ args.map(|x| IntervalOrOwned::Owned(x.clone())),
+ )?;
+ let addr = Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?;
+ let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]);
+ Ok(std::string::String::from_utf8_lossy(self.read_memory(addr, size)?).into_owned())
})()
- .unwrap_or_else(|| "<format-args-evaluation-failed>".to_string());
+ .unwrap_or_else(|e| format!("Failed to render panic format args: {e:?}"));
Err(MirEvalError::Panic(message))
}
SliceLen => {
@@ -207,7 +300,7 @@ impl Evaluator<'_> {
}
DropInPlace => {
let ty =
- generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)).ok_or(
+ generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)).ok_or(
MirEvalError::TypeError(
"generic argument of drop_in_place is not provided",
),
@@ -224,7 +317,35 @@ impl Evaluator<'_> {
)?;
Ok(vec![])
}
- x => not_supported!("Executing lang item {x:?}"),
+ it => not_supported!("Executing lang item {it:?}"),
+ }
+ }
+
+ fn exec_syscall(
+ &mut self,
+ id: i64,
+ args: &[IntervalAndTy],
+ destination: Interval,
+ _locals: &Locals,
+ _span: MirSpan,
+ ) -> Result<()> {
+ match id {
+ 318 => {
+ // SYS_getrandom
+ let [buf, len, _flags] = args else {
+ return Err(MirEvalError::TypeError("SYS_getrandom args are not provided"));
+ };
+ let addr = Address::from_bytes(buf.get(self)?)?;
+ let size = from_bytes!(usize, len.get(self)?);
+ for i in 0..size {
+ let rand_byte = self.random_state.rand_u64() as u8;
+ self.write_memory(addr.offset(i), &[rand_byte])?;
+ }
+ destination.write_from_interval(self, len.interval)
+ }
+ _ => {
+ not_supported!("Unknown syscall id {id:?}")
+ }
}
}
@@ -234,8 +355,8 @@ impl Evaluator<'_> {
args: &[IntervalAndTy],
_generic_args: &Substitution,
destination: Interval,
- locals: &Locals<'_>,
- _span: MirSpan,
+ locals: &Locals,
+ span: MirSpan,
) -> Result<()> {
match as_str {
"memcmp" => {
@@ -299,7 +420,9 @@ impl Evaluator<'_> {
}
"pthread_getspecific" => {
let Some(arg0) = args.get(0) else {
- return Err(MirEvalError::TypeError("pthread_getspecific arg0 is not provided"));
+ return Err(MirEvalError::TypeError(
+ "pthread_getspecific arg0 is not provided",
+ ));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let value = self.thread_local_storage.get_key(key)?;
@@ -308,11 +431,15 @@ impl Evaluator<'_> {
}
"pthread_setspecific" => {
let Some(arg0) = args.get(0) else {
- return Err(MirEvalError::TypeError("pthread_setspecific arg0 is not provided"));
+ return Err(MirEvalError::TypeError(
+ "pthread_setspecific arg0 is not provided",
+ ));
};
let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]);
let Some(arg1) = args.get(1) else {
- return Err(MirEvalError::TypeError("pthread_setspecific arg1 is not provided"));
+ return Err(MirEvalError::TypeError(
+ "pthread_setspecific arg1 is not provided",
+ ));
};
let value = from_bytes!(u128, pad16(arg1.get(self)?, false));
self.thread_local_storage.set_key(key, value)?;
@@ -326,17 +453,52 @@ impl Evaluator<'_> {
destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?;
Ok(())
}
+ "syscall" => {
+ let Some((id, rest)) = args.split_first() else {
+ return Err(MirEvalError::TypeError(
+ "syscall arg1 is not provided",
+ ));
+ };
+ let id = from_bytes!(i64, id.get(self)?);
+ self.exec_syscall(id, rest, destination, locals, span)
+ }
+ "sched_getaffinity" => {
+ let [_pid, _set_size, set] = args else {
+ return Err(MirEvalError::TypeError("libc::write args are not provided"));
+ };
+ let set = Address::from_bytes(set.get(self)?)?;
+ // Only enable core 0 (we are single-threaded anyway), i.e. the bit mask 0x00000001
+ self.write_memory(set, &[1])?;
+ // return 0 as success
+ self.write_memory_using_ref(destination.addr, destination.size)?.fill(0);
+ Ok(())
+ }
_ => not_supported!("unknown external function {as_str}"),
}
}
+ fn exec_platform_intrinsic(
+ &mut self,
+ name: &str,
+ args: &[IntervalAndTy],
+ generic_args: &Substitution,
+ destination: Interval,
+ locals: &Locals,
+ span: MirSpan,
+ ) -> Result<()> {
+ if let Some(name) = name.strip_prefix("simd_") {
+ return self.exec_simd_intrinsic(name, args, generic_args, destination, locals, span);
+ }
+ not_supported!("unknown platform intrinsic {name}");
+ }
+
fn exec_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
destination: Interval,
- locals: &Locals<'_>,
+ locals: &Locals,
span: MirSpan,
) -> Result<()> {
if let Some(name) = name.strip_prefix("atomic_") {
@@ -347,7 +509,9 @@ impl Evaluator<'_> {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64) -> f64"));
+ return Err(MirEvalError::TypeError(
+ "f64 intrinsic signature doesn't match fn (f64) -> f64",
+ ));
};
let arg = from_bytes!(f64, arg.get(self)?);
match name {
@@ -373,7 +537,9 @@ impl Evaluator<'_> {
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64, f64) -> f64"));
+ return Err(MirEvalError::TypeError(
+ "f64 intrinsic signature doesn't match fn (f64, f64) -> f64",
+ ));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(f64, arg2.get(self)?);
@@ -387,7 +553,9 @@ impl Evaluator<'_> {
}
"powi" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError("powif64 signature doesn't match fn (f64, i32) -> f64"));
+ return Err(MirEvalError::TypeError(
+ "powif64 signature doesn't match fn (f64, i32) -> f64",
+ ));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(i32, arg2.get(self)?);
@@ -395,7 +563,9 @@ impl Evaluator<'_> {
}
"fma" => {
let [arg1, arg2, arg3] = args else {
- return Err(MirEvalError::TypeError("fmaf64 signature doesn't match fn (f64, f64, f64) -> f64"));
+ return Err(MirEvalError::TypeError(
+ "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64",
+ ));
};
let arg1 = from_bytes!(f64, arg1.get(self)?);
let arg2 = from_bytes!(f64, arg2.get(self)?);
@@ -411,7 +581,9 @@ impl Evaluator<'_> {
"sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs"
| "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32) -> f32"));
+ return Err(MirEvalError::TypeError(
+ "f32 intrinsic signature doesn't match fn (f32) -> f32",
+ ));
};
let arg = from_bytes!(f32, arg.get(self)?);
match name {
@@ -437,7 +609,9 @@ impl Evaluator<'_> {
}
"pow" | "minnum" | "maxnum" | "copysign" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32, f32) -> f32"));
+ return Err(MirEvalError::TypeError(
+ "f32 intrinsic signature doesn't match fn (f32, f32) -> f32",
+ ));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(f32, arg2.get(self)?);
@@ -451,7 +625,9 @@ impl Evaluator<'_> {
}
"powi" => {
let [arg1, arg2] = args else {
- return Err(MirEvalError::TypeError("powif32 signature doesn't match fn (f32, i32) -> f32"));
+ return Err(MirEvalError::TypeError(
+ "powif32 signature doesn't match fn (f32, i32) -> f32",
+ ));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(i32, arg2.get(self)?);
@@ -459,7 +635,9 @@ impl Evaluator<'_> {
}
"fma" => {
let [arg1, arg2, arg3] = args else {
- return Err(MirEvalError::TypeError("fmaf32 signature doesn't match fn (f32, f32, f32) -> f32"));
+ return Err(MirEvalError::TypeError(
+ "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32",
+ ));
};
let arg1 = from_bytes!(f32, arg1.get(self)?);
let arg2 = from_bytes!(f32, arg2.get(self)?);
@@ -472,21 +650,77 @@ impl Evaluator<'_> {
}
match name {
"size_of" => {
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
let size = self.size_of_sized(ty, locals, "size_of arg")?;
destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
}
"min_align_of" | "pref_align_of" => {
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
return Err(MirEvalError::TypeError("align_of generic arg is not provided"));
};
let align = self.layout(ty)?.align.abi.bytes();
destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size])
}
+ "size_of_val" => {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError("size_of_val generic arg is not provided"));
+ };
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("size_of_val args are not provided"));
+ };
+ if let Some((size, _)) = self.size_align_of(ty, locals)? {
+ destination.write_from_bytes(self, &size.to_le_bytes())
+ } else {
+ let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2);
+ let (size, _) = self.size_align_of_unsized(ty, metadata, locals)?;
+ destination.write_from_bytes(self, &size.to_le_bytes())
+ }
+ }
+ "min_align_of_val" => {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
+ return Err(MirEvalError::TypeError("min_align_of_val generic arg is not provided"));
+ };
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("min_align_of_val args are not provided"));
+ };
+ if let Some((_, align)) = self.size_align_of(ty, locals)? {
+ destination.write_from_bytes(self, &align.to_le_bytes())
+ } else {
+ let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2);
+ let (_, align) = self.size_align_of_unsized(ty, metadata, locals)?;
+ destination.write_from_bytes(self, &align.to_le_bytes())
+ }
+ }
+ "type_name" => {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError("type_name generic arg is not provided"));
+ };
+ let ty_name = match ty.display_source_code(
+ self.db,
+ locals.body.owner.module(self.db.upcast()),
+ true,
+ ) {
+ Ok(ty_name) => ty_name,
+ // Fall back to the human-readable display in case of `Err`. Ideally we want to use `display_source_code` to
+ // render full paths.
+ Err(_) => ty.display(self.db).to_string(),
+ };
+ let len = ty_name.len();
+ let addr = self.heap_allocate(len, 1)?;
+ self.write_memory(addr, ty_name.as_bytes())?;
+ destination.slice(0..self.ptr_size()).write_from_bytes(self, &addr.to_bytes())?;
+ destination
+ .slice(self.ptr_size()..2 * self.ptr_size())
+ .write_from_bytes(self, &len.to_le_bytes())
+ }
"needs_drop" => {
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
};
let result = !ty.clone().is_copy(self.db, locals.body.owner);
@@ -501,13 +735,17 @@ impl Evaluator<'_> {
let ans = lhs.get(self)? == rhs.get(self)?;
destination.write_from_bytes(self, &[u8::from(ans)])
}
- "saturating_add" => {
+ "saturating_add" | "saturating_sub" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("saturating_add args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
- let ans = lhs.saturating_add(rhs);
+ let ans = match name {
+ "saturating_add" => lhs.saturating_add(rhs),
+ "saturating_sub" => lhs.saturating_sub(rhs),
+ _ => unreachable!(),
+ };
let bits = destination.size * 8;
// FIXME: signed
let is_signed = false;
@@ -526,7 +764,22 @@ impl Evaluator<'_> {
let ans = lhs.wrapping_add(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
- "wrapping_sub" | "unchecked_sub" | "ptr_offset_from_unsigned" | "ptr_offset_from" => {
+ "ptr_offset_from_unsigned" | "ptr_offset_from" => {
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
+ };
+ let lhs = i128::from_le_bytes(pad16(lhs.get(self)?, false));
+ let rhs = i128::from_le_bytes(pad16(rhs.get(self)?, false));
+ let ans = lhs.wrapping_sub(rhs);
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError("ptr_offset_from generic arg is not provided"));
+ };
+ let size = self.size_of_sized(ty, locals, "ptr_offset_from arg")? as i128;
+ let ans = ans / size;
+ destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
+ }
+ "wrapping_sub" | "unchecked_sub" => {
let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("wrapping_sub args are not provided"));
};
@@ -544,6 +797,26 @@ impl Evaluator<'_> {
let ans = lhs.wrapping_mul(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
+ "wrapping_shl" | "unchecked_shl" => {
+ // FIXME: signed
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("unchecked_shl args are not provided"));
+ };
+ let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
+ let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
+ let ans = lhs.wrapping_shl(rhs as u32);
+ destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
+ }
+ "wrapping_shr" | "unchecked_shr" => {
+ // FIXME: signed
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("unchecked_shr args are not provided"));
+ };
+ let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
+ let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
+ let ans = lhs.wrapping_shr(rhs as u32);
+ destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
+ }
"unchecked_rem" => {
// FIXME: signed
let [lhs, rhs] = args else {
@@ -588,7 +861,7 @@ impl Evaluator<'_> {
_ => unreachable!(),
};
let is_overflow = u128overflow
- || ans.to_le_bytes()[op_size..].iter().any(|&x| x != 0 && x != 255);
+ || ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255);
let is_overflow = vec![u8::from(is_overflow)];
let layout = self.layout(&result_ty)?;
let result = self.make_by_layout(
@@ -603,10 +876,15 @@ impl Evaluator<'_> {
}
"copy" | "copy_nonoverlapping" => {
let [src, dst, offset] = args else {
- return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided"));
+ return Err(MirEvalError::TypeError(
+ "copy_nonoverlapping args are not provided",
+ ));
};
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
- return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided"));
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError(
+ "copy_nonoverlapping generic arg is not provided",
+ ));
};
let src = Address::from_bytes(src.get(self)?)?;
let dst = Address::from_bytes(dst.get(self)?)?;
@@ -621,7 +899,8 @@ impl Evaluator<'_> {
let [ptr, offset] = args else {
return Err(MirEvalError::TypeError("offset args are not provided"));
};
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
};
let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false));
@@ -652,20 +931,106 @@ impl Evaluator<'_> {
}
"ctpop" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("likely arg is not provided"));
+ return Err(MirEvalError::TypeError("ctpop arg is not provided"));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones();
destination
.write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
}
+ "ctlz" | "ctlz_nonzero" => {
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("cttz arg is not provided"));
+ };
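+ // `leading_zeros` runs on the value zero-extended to 128 bits, so the padding bits
+ // are subtracted back out below.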
+ let result =
+ u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;
+ let result = result - (128 - arg.interval.size * 8);
+ destination
+ .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
+ }
"cttz" | "cttz_nonzero" => {
let [arg] = args else {
- return Err(MirEvalError::TypeError("likely arg is not provided"));
+ return Err(MirEvalError::TypeError("cttz arg is not provided"));
};
let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros();
destination
.write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size])
}
+ "rotate_left" => {
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("rotate_left args are not provided"));
+ };
+ let lhs = &lhs.get(self)?[0..destination.size];
+ let rhs = rhs.get(self)?[0] as u32;
+ match destination.size {
+ 1 => {
+ let r = from_bytes!(u8, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 2 => {
+ let r = from_bytes!(u16, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 4 => {
+ let r = from_bytes!(u32, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 8 => {
+ let r = from_bytes!(u64, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 16 => {
+ let r = from_bytes!(u128, lhs).rotate_left(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ s => not_supported!("destination with size {s} for rotate_left"),
+ }
+ }
+ "rotate_right" => {
+ let [lhs, rhs] = args else {
+ return Err(MirEvalError::TypeError("rotate_right args are not provided"));
+ };
+ let lhs = &lhs.get(self)?[0..destination.size];
+ let rhs = rhs.get(self)?[0] as u32;
+ match destination.size {
+ 1 => {
+ let r = from_bytes!(u8, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 2 => {
+ let r = from_bytes!(u16, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 4 => {
+ let r = from_bytes!(u32, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 8 => {
+ let r = from_bytes!(u64, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ 16 => {
+ let r = from_bytes!(u128, lhs).rotate_right(rhs);
+ destination.write_from_bytes(self, &r.to_le_bytes())
+ }
+ s => not_supported!("destination with size {s} for rotate_right"),
+ }
+ }
+ "discriminant_value" => {
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("discriminant_value arg is not provided"));
+ };
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError(
+ "discriminant_value generic arg is not provided",
+ ));
+ };
+ let addr = Address::from_bytes(arg.get(self)?)?;
+ let size = self.size_of_sized(ty, locals, "discriminant_value ptr type")?;
+ let interval = Interval { addr, size };
+ let r = self.compute_discriminant(ty.clone(), interval.get(self)?)?;
+ destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size])
+ }
"const_eval_select" => {
let [tuple, const_fn, _] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
@@ -681,24 +1046,126 @@ impl Evaluator<'_> {
let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
- self.exec_fn_trait(&args, destination, locals, span)
+ if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) {
+ if let Some(def) = target
+ .as_trait()
+ .and_then(|it| self.db.trait_data(it).method_by_name(&name![call_once]))
+ {
+ self.exec_fn_trait(
+ def,
+ &args,
+ // FIXME: wrong for manual impls of `FnOnce`
+ Substitution::empty(Interner),
+ locals,
+ destination,
+ None,
+ span,
+ )?;
+ return Ok(());
+ }
+ }
+ not_supported!("FnOnce was not available for executing const_eval_select");
+ }
+ "read_via_copy" | "volatile_load" => {
+ let [arg] = args else {
+ return Err(MirEvalError::TypeError("read_via_copy args are not provided"));
+ };
+ let addr = Address::from_bytes(arg.interval.get(self)?)?;
+ destination.write_from_interval(self, Interval { addr, size: destination.size })
+ }
+ "write_bytes" => {
+ let [dst, val, count] = args else {
+ return Err(MirEvalError::TypeError("write_bytes args are not provided"));
+ };
+ let count = from_bytes!(usize, count.get(self)?);
+ let val = from_bytes!(u8, val.get(self)?);
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner))
+ else {
+ return Err(MirEvalError::TypeError(
+ "write_bytes generic arg is not provided",
+ ));
+ };
+ let dst = Address::from_bytes(dst.get(self)?)?;
+ let size = self.size_of_sized(ty, locals, "write_bytes ptr type")?;
+ let size = count * size;
+ self.write_memory_using_ref(dst, size)?.fill(val);
+ Ok(())
}
_ => not_supported!("unknown intrinsic {name}"),
}
}
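+ /// Computes the size and alignment of an unsized value from its type and pointer metadata:
+ /// the length of a `str`/slice, the vtable of a `dyn` object, or recursively the unsized
+ /// tail of a struct.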
+ fn size_align_of_unsized(
+ &mut self,
+ ty: &Ty,
+ metadata: Interval,
+ locals: &Locals,
+ ) -> Result<(usize, usize)> {
+ Ok(match ty.kind(Interner) {
+ TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1),
+ TyKind::Slice(inner) => {
+ let len = from_bytes!(usize, metadata.get(self)?);
+ let (size, align) = self.size_align_of_sized(inner, locals, "slice inner type")?;
+ (size * len, align)
+ }
+ TyKind::Dyn(_) => self.size_align_of_sized(
+ self.vtable_map.ty_of_bytes(metadata.get(self)?)?,
+ locals,
+ "dyn concrete type",
+ )?,
+ TyKind::Adt(id, subst) => {
+ let id = id.0;
+ let layout = self.layout_adt(id, subst.clone())?;
+ let id = match id {
+ AdtId::StructId(s) => s,
+ _ => not_supported!("unsized enum or union"),
+ };
+ let field_types = &self.db.field_types(id.into());
+ let last_field_ty =
+ field_types.iter().rev().next().unwrap().1.clone().substitute(Interner, subst);
+ let sized_part_size =
+ layout.fields.offset(field_types.iter().count() - 1).bytes_usize();
+ let sized_part_align = layout.align.abi.bytes() as usize;
+ let (unsized_part_size, unsized_part_align) =
+ self.size_align_of_unsized(&last_field_ty, metadata, locals)?;
+ let align = sized_part_align.max(unsized_part_align) as isize;
+ let size = (sized_part_size + unsized_part_size) as isize;
+ // Must add any necessary padding to `size`
+ // (to make it a multiple of `align`) before returning it.
+ //
+ // Namely, the returned size should be, in C notation:
+ //
+ // `size + ((size & (align-1)) ? (align - (size & (align-1))) : 0)`
+ //
+ // emulated via the semi-standard fast bit trick:
+ //
+ // `(size + (align-1)) & -align`
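+ //
+ // (e.g. size = 5, align = 4: (5 + 3) & -4 = 8)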
+ let size = (size + (align - 1)) & (-align);
+ (size as usize, align as usize)
+ }
+ _ => not_supported!("unsized type other than str, slice, struct and dyn"),
+ })
+ }
+
fn exec_atomic_intrinsic(
&mut self,
name: &str,
args: &[IntervalAndTy],
generic_args: &Substitution,
destination: Interval,
- locals: &Locals<'_>,
+ locals: &Locals,
_span: MirSpan,
) -> Result<()> {
// We are a single threaded runtime with no UB checking and no optimization, so
- // we can implement these as normal functions.
- let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ // we can implement atomic intrinsics as normal functions.
+
+ if name.starts_with("singlethreadfence_") || name.starts_with("fence_") {
+ return Ok(());
+ }
+
+ // The rest of the atomic intrinsics take exactly one generic arg
+
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided"));
};
let Some(arg0) = args.get(0) else {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs
new file mode 100644
index 000000000..ec7463104
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs
@@ -0,0 +1,177 @@
+//! Shim implementation for simd intrinsics
+
+use std::cmp::Ordering;
+
+use crate::TyKind;
+
+use super::*;
+
+macro_rules! from_bytes {
+ ($ty:tt, $value:expr) => {
+ ($ty::from_le_bytes(match ($value).try_into() {
+ Ok(it) => it,
+ Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+ }))
+ };
+}
+
+macro_rules! not_supported {
+ ($it: expr) => {
+ return Err(MirEvalError::NotSupported(format!($it)))
+ };
+}
+
+impl Evaluator<'_> {
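+ /// Returns the lane count and element type of a SIMD struct, taken from its const generic
+ /// length parameter or, failing that, from its field list.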
+ fn detect_simd_ty(&self, ty: &Ty) -> Result<(usize, Ty)> {
+ match ty.kind(Interner) {
+ TyKind::Adt(id, subst) => {
+ let len = match subst.as_slice(Interner).get(1).and_then(|it| it.constant(Interner))
+ {
+ Some(len) => len,
+ _ => {
+ if let AdtId::StructId(id) = id.0 {
+ let struct_data = self.db.struct_data(id);
+ let fields = struct_data.variant_data.fields();
+ let Some((first_field, _)) = fields.iter().next() else {
+ not_supported!("simd type with no field");
+ };
+ let field_ty = self.db.field_types(id.into())[first_field]
+ .clone()
+ .substitute(Interner, subst);
+ return Ok((fields.len(), field_ty));
+ }
+ return Err(MirEvalError::TypeError("simd type with no len param"));
+ }
+ };
+ match try_const_usize(self.db, len) {
+ Some(len) => {
+ let Some(ty) = subst.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else {
+ return Err(MirEvalError::TypeError("simd type with no ty param"));
+ };
+ Ok((len as usize, ty.clone()))
+ }
+ None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")),
+ }
+ }
+ _ => Err(MirEvalError::TypeError("simd type which is not a struct")),
+ }
+ }
+
+ pub(super) fn exec_simd_intrinsic(
+ &mut self,
+ name: &str,
+ args: &[IntervalAndTy],
+ _generic_args: &Substitution,
+ destination: Interval,
+ _locals: &Locals,
+ _span: MirSpan,
+ ) -> Result<()> {
+ match name {
+ "and" | "or" | "xor" => {
+ let [left, right] = args else {
+ return Err(MirEvalError::TypeError("simd bit op args are not provided"));
+ };
+ let result = left
+ .get(self)?
+ .iter()
+ .zip(right.get(self)?)
+ .map(|(&it, &y)| match name {
+ "and" => it & y,
+ "or" => it | y,
+ "xor" => it ^ y,
+ _ => unreachable!(),
+ })
+ .collect::<Vec<_>>();
+ destination.write_from_bytes(self, &result)
+ }
+ "eq" | "ne" | "lt" | "le" | "gt" | "ge" => {
+ let [left, right] = args else {
+ return Err(MirEvalError::TypeError("simd args are not provided"));
+ };
+ let (len, ty) = self.detect_simd_ty(&left.ty)?;
+ let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
+ let size = left.interval.size / len;
+ let dest_size = destination.size / len;
+ let mut destination_bytes = vec![];
+ let vector = left.get(self)?.chunks(size).zip(right.get(self)?.chunks(size));
+ for (l, r) in vector {
+ let mut result = Ordering::Equal;
+ for (l, r) in l.iter().zip(r).rev() {
+ let it = l.cmp(r);
+ if it != Ordering::Equal {
+ result = it;
+ break;
+ }
+ }
+ if is_signed {
+ if let Some((&l, &r)) = l.iter().zip(r).rev().next() {
+ if l != r {
+ result = (l as i8).cmp(&(r as i8));
+ }
+ }
+ }
+ let result = match result {
+ Ordering::Less => ["lt", "le", "ne"].contains(&name),
+ Ordering::Equal => ["ge", "le", "eq"].contains(&name),
+ Ordering::Greater => ["ge", "gt", "ne"].contains(&name),
+ };
+ let result = if result { 255 } else { 0 };
+ destination_bytes.extend(std::iter::repeat(result).take(dest_size));
+ }
+
+ destination.write_from_bytes(self, &destination_bytes)
+ }
+ "bitmask" => {
+ let [op] = args else {
+ return Err(MirEvalError::TypeError("simd_bitmask args are not provided"));
+ };
+ let (op_len, _) = self.detect_simd_ty(&op.ty)?;
+ let op_count = op.interval.size / op_len;
+ let mut result: u64 = 0;
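+ // Set bit `i` of the mask when lane `i` contains any non-zero byte.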
+ for (i, val) in op.get(self)?.chunks(op_count).enumerate() {
+ if !val.iter().all(|&it| it == 0) {
+ result |= 1 << i;
+ }
+ }
+ destination.write_from_bytes(self, &result.to_le_bytes()[0..destination.size])
+ }
+ "shuffle" => {
+ let [left, right, index] = args else {
+ return Err(MirEvalError::TypeError("simd_shuffle args are not provided"));
+ };
+ let TyKind::Array(_, index_len) = index.ty.kind(Interner) else {
+ return Err(MirEvalError::TypeError(
+ "simd_shuffle index argument has non-array type",
+ ));
+ };
+ let index_len = match try_const_usize(self.db, index_len) {
+ Some(it) => it as usize,
+ None => {
+ return Err(MirEvalError::TypeError(
+ "simd type with unevaluatable len param",
+ ))
+ }
+ };
+ let (left_len, _) = self.detect_simd_ty(&left.ty)?;
+ let left_size = left.interval.size / left_len;
+ let vector =
+ left.get(self)?.chunks(left_size).chain(right.get(self)?.chunks(left_size));
+ let mut result = vec![];
+ for index in index.get(self)?.chunks(index.interval.size / index_len) {
+ let index = from_bytes!(u32, index) as usize;
+ let val = match vector.clone().nth(index) {
+ Some(it) => it,
+ None => {
+ return Err(MirEvalError::TypeError(
+ "out of bound access in simd shuffle",
+ ))
+ }
+ };
+ result.extend(val);
+ }
+ destination.write_from_bytes(self, &result)
+ }
+ _ => not_supported!("unknown simd intrinsic {name}"),
+ }
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
index ca4268b8f..46165cf3d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs
@@ -30,7 +30,7 @@ fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalEr
db.trait_environment(func_id.into()),
)
.map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
- let (result, stdout, stderr) = interpret_mir(db, &body, false);
+ let (result, stdout, stderr) = interpret_mir(db, body, false, None);
result?;
Ok((stdout, stderr))
}
@@ -183,6 +183,50 @@ fn main() {
}
#[test]
+fn drop_struct_field() {
+ check_pass(
+ r#"
+//- minicore: drop, add, option, cell, builtin_impls
+
+use core::cell::Cell;
+
+fn should_not_reach() {
+ _ // FIXME: replace this function with panic when that works
+}
+
+struct X<'a>(&'a Cell<i32>);
+impl<'a> Drop for X<'a> {
+ fn drop(&mut self) {
+ self.0.set(self.0.get() + 1)
+ }
+}
+
+struct Tuple<'a>(X<'a>, X<'a>, X<'a>);
+
+fn main() {
+ let s = Cell::new(0);
+ {
+ let x0 = X(&s);
+ let xt = Tuple(x0, X(&s), X(&s));
+ let x1 = xt.1;
+ if s.get() != 0 {
+ should_not_reach();
+ }
+ drop(xt.0);
+ if s.get() != 1 {
+ should_not_reach();
+ }
+ }
+ // FIXME: this should be 3
+ if s.get() != 2 {
+ should_not_reach();
+ }
+}
+"#,
+ );
+}
+
+#[test]
fn drop_in_place() {
check_pass(
r#"
@@ -614,6 +658,78 @@ fn main() {
}
#[test]
+fn self_with_capital_s() {
+ check_pass(
+ r#"
+//- minicore: fn, add, copy
+
+struct S1;
+
+impl S1 {
+ fn f() {
+ Self;
+ }
+}
+
+struct S2 {
+ f1: i32,
+}
+
+impl S2 {
+ fn f() {
+ Self { f1: 5 };
+ }
+}
+
+struct S3(i32);
+
+impl S3 {
+ fn f() {
+ Self(2);
+ Self;
+ let this = Self;
+ this(2);
+ }
+}
+
+fn main() {
+ S1::f();
+ S2::f();
+ S3::f();
+}
+ "#,
+ );
+}
+
+#[test]
+fn syscalls() {
+ check_pass(
+ r#"
+//- minicore: option
+
+extern "C" {
+ pub unsafe extern "C" fn syscall(num: i64, ...) -> i64;
+}
+
+const SYS_getrandom: i64 = 318;
+
+fn should_not_reach() {
+ _ // FIXME: replace this function with panic when that works
+}
+
+fn main() {
+ let mut x: i32 = 0;
+ let r = syscall(SYS_getrandom, &mut x, 4usize, 0);
+ if r != 4 {
+ should_not_reach();
+ }
+}
+
+"#,
+ )
+}
+
+#[test]
fn posix_tls() {
check_pass(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
index 2cb29b4ab..718df8331 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -146,12 +146,12 @@ impl MirLowerError {
ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?,
}
}
- MirLowerError::MissingFunctionDefinition(owner, x) => {
+ MirLowerError::MissingFunctionDefinition(owner, it) => {
let body = db.body(*owner);
writeln!(
f,
"Missing function definition for {}",
- body.pretty_print_expr(db.upcast(), *owner, *x)
+ body.pretty_print_expr(db.upcast(), *owner, *it)
)?;
}
MirLowerError::TypeMismatch(e) => {
@@ -202,15 +202,15 @@ impl MirLowerError {
}
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirLowerError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirLowerError::NotSupported(format!($it)))
};
}
macro_rules! implementation_error {
- ($x: expr) => {{
- ::stdx::never!("MIR lower implementation bug: {}", format!($x));
- return Err(MirLowerError::ImplementationError(format!($x)));
+ ($it: expr) => {{
+ ::stdx::never!("MIR lower implementation bug: {}", format!($it));
+ return Err(MirLowerError::ImplementationError(format!($it)));
}};
}
@@ -310,24 +310,30 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest)
}
Adjust::Deref(_) => {
- let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)? else {
- return Ok(None);
- };
+ let Some((p, current)) =
+ self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)?
+ else {
+ return Ok(None);
+ };
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
Ok(Some(current))
}
Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => {
- let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
- return Ok(None);
- };
+ let Some((p, current)) =
+ self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
+ else {
+ return Ok(None);
+ };
let bk = BorrowKind::from_chalk(*m);
self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
Ok(Some(current))
}
Adjust::Pointer(cast) => {
- let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
- return Ok(None);
- };
+ let Some((p, current)) =
+ self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
+ else {
+ return Ok(None);
+ };
self.push_assignment(
current,
place,
@@ -373,45 +379,49 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
Err(MirLowerError::IncompleteExpr)
- },
+ }
Expr::Path(p) => {
- let pr = if let Some((assoc, subst)) = self
- .infer
- .assoc_resolutions_for_expr(expr_id)
- {
- match assoc {
- hir_def::AssocItemId::ConstId(c) => {
- self.lower_const(c.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
- return Ok(Some(current))
- },
- hir_def::AssocItemId::FunctionId(_) => {
- // FnDefs are zero sized, no action is needed.
- return Ok(Some(current))
+ let pr =
+ if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) {
+ match assoc {
+ hir_def::AssocItemId::ConstId(c) => {
+ self.lower_const(
+ c.into(),
+ current,
+ place,
+ subst,
+ expr_id.into(),
+ self.expr_ty_without_adjust(expr_id),
+ )?;
+ return Ok(Some(current));
+ }
+ hir_def::AssocItemId::FunctionId(_) => {
+ // FnDefs are zero sized, no action is needed.
+ return Ok(Some(current));
+ }
+ hir_def::AssocItemId::TypeAliasId(_) => {
+ // FIXME: If it is unreachable, use proper error instead of `not_supported`.
+ not_supported!("associated functions and types")
+ }
}
- hir_def::AssocItemId::TypeAliasId(_) => {
- // FIXME: If it is unreachable, use proper error instead of `not_supported`.
- not_supported!("associated functions and types")
- },
- }
- } else if let Some(variant) = self
- .infer
- .variant_resolution_for_expr(expr_id)
- {
- match variant {
- VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
- VariantId::StructId(s) => ValueNs::StructId(s),
- VariantId::UnionId(_) => implementation_error!("Union variant as path"),
- }
- } else {
- let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
- let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
- resolver
- .resolve_path_in_value_ns_fully(self.db.upcast(), p)
- .ok_or_else(unresolved_name)?
- };
+ } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
+ match variant {
+ VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
+ VariantId::StructId(s) => ValueNs::StructId(s),
+ VariantId::UnionId(_) => implementation_error!("Union variant as path"),
+ }
+ } else {
+ let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
+ resolver
+ .resolve_path_in_value_ns_fully(self.db.upcast(), p)
+ .ok_or_else(unresolved_name)?
+ };
match pr {
ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => {
- let Some((temp, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, false)? else {
+ let Some((temp, current)) =
+ self.lower_expr_as_place_without_adjust(current, expr_id, false)?
+ else {
return Ok(None);
};
self.push_assignment(
@@ -423,11 +433,19 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(Some(current))
}
ValueNs::ConstId(const_id) => {
- self.lower_const(const_id.into(), current, place, Substitution::empty(Interner), expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
+ self.lower_const(
+ const_id.into(),
+ current,
+ place,
+ Substitution::empty(Interner),
+ expr_id.into(),
+ self.expr_ty_without_adjust(expr_id),
+ )?;
Ok(Some(current))
}
ValueNs::EnumVariantId(variant_id) => {
- let variant_data = &self.db.enum_data(variant_id.parent).variants[variant_id.local_id];
+ let variant_data =
+ &self.db.enum_data(variant_id.parent).variants[variant_id.local_id];
if variant_data.variant_data.kind() == StructKind::Unit {
let ty = self.infer.type_of_expr[expr_id].clone();
current = self.lower_enum_variant(
@@ -468,17 +486,16 @@ impl<'ctx> MirLowerCtx<'ctx> {
);
Ok(Some(current))
}
- ValueNs::FunctionId(_) | ValueNs::StructId(_) => {
+ ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::ImplSelf(_) => {
// It's probably a unit struct or a zero sized function, so no action is needed.
Ok(Some(current))
}
- x => {
- not_supported!("unknown name {x:?} in value name space");
- }
}
}
Expr::If { condition, then_branch, else_branch } => {
- let Some((discr, current)) = self.lower_expr_to_some_operand(*condition, current)? else {
+ let Some((discr, current)) =
+ self.lower_expr_to_some_operand(*condition, current)?
+ else {
return Ok(None);
};
let start_of_then = self.new_basic_block();
@@ -501,15 +518,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()))
}
Expr::Let { pat, expr } => {
- let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)?
+ else {
return Ok(None);
};
- let (then_target, else_target) = self.pattern_match(
- current,
- None,
- cond_place,
- *pat,
- )?;
+ let (then_target, else_target) =
+ self.pattern_match(current, None, cond_place, *pat)?;
self.write_bytes_to_place(
then_target,
place.clone(),
@@ -533,49 +547,35 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Block { id: _, statements, tail, label } => {
if let Some(label) = label {
- self.lower_loop(current, place.clone(), Some(*label), expr_id.into(), |this, begin| {
- if let Some(current) = this.lower_block_to_place(statements, begin, *tail, place, expr_id.into())? {
- let end = this.current_loop_end()?;
- this.set_goto(current, end, expr_id.into());
- }
- Ok(())
- })
+ self.lower_loop(
+ current,
+ place.clone(),
+ Some(*label),
+ expr_id.into(),
+ |this, begin| {
+ if let Some(current) = this.lower_block_to_place(
+ statements,
+ begin,
+ *tail,
+ place,
+ expr_id.into(),
+ )? {
+ let end = this.current_loop_end()?;
+ this.set_goto(current, end, expr_id.into());
+ }
+ Ok(())
+ },
+ )
} else {
self.lower_block_to_place(statements, current, *tail, place, expr_id.into())
}
}
- Expr::Loop { body, label } => self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
- let scope = this.push_drop_scope();
- if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
- current = scope.pop_and_drop(this, current);
- this.set_goto(current, begin, expr_id.into());
- } else {
- scope.pop_assume_dropped(this);
- }
- Ok(())
- }),
- Expr::While { condition, body, label } => {
- self.lower_loop(current, place, *label, expr_id.into(),|this, begin| {
+ Expr::Loop { body, label } => {
+ self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
let scope = this.push_drop_scope();
- let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else {
- return Ok(());
- };
- let fail_cond = this.new_basic_block();
- let after_cond = this.new_basic_block();
- this.set_terminator(
- to_switch,
- TerminatorKind::SwitchInt {
- discr,
- targets: SwitchTargets::static_if(1, after_cond, fail_cond),
- },
- expr_id.into(),
- );
- let fail_cond = this.drop_until_scope(this.drop_scopes.len() - 1, fail_cond);
- let end = this.current_loop_end()?;
- this.set_goto(fail_cond, end, expr_id.into());
- if let Some((_, block)) = this.lower_expr_as_place(after_cond, *body, true)? {
- let block = scope.pop_and_drop(this, block);
- this.set_goto(block, begin, expr_id.into());
+ if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
+ current = scope.pop_and_drop(this, current);
+ this.set_goto(current, begin, expr_id.into());
} else {
scope.pop_assume_dropped(this);
}
@@ -583,8 +583,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
})
}
Expr::Call { callee, args, .. } => {
- if let Some((func_id, generic_args)) =
- self.infer.method_resolution(expr_id) {
+ if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
let ty = chalk_ir::TyKind::FnDef(
CallableDefId::FunctionId(func_id).to_chalk(self.db),
generic_args,
@@ -601,24 +600,51 @@ impl<'ctx> MirLowerCtx<'ctx> {
);
}
let callee_ty = self.expr_ty_after_adjustments(*callee);
- match &callee_ty.data(Interner).kind {
+ match &callee_ty.kind(Interner) {
chalk_ir::TyKind::FnDef(..) => {
let func = Operand::from_bytes(vec![], callee_ty.clone());
- self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into())
+ self.lower_call_and_args(
+ func,
+ args.iter().copied(),
+ place,
+ current,
+ self.is_uninhabited(expr_id),
+ expr_id.into(),
+ )
}
chalk_ir::TyKind::Function(_) => {
- let Some((func, current)) = self.lower_expr_to_some_operand(*callee, current)? else {
+ let Some((func, current)) =
+ self.lower_expr_to_some_operand(*callee, current)?
+ else {
return Ok(None);
};
- self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into())
+ self.lower_call_and_args(
+ func,
+ args.iter().copied(),
+ place,
+ current,
+ self.is_uninhabited(expr_id),
+ expr_id.into(),
+ )
+ }
+ TyKind::Closure(_, _) => {
+ not_supported!(
+ "method resolution not emitted for closure (Are Fn traits available?)"
+ );
+ }
+ TyKind::Error => {
+ return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id))
}
- TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id)),
_ => return Err(MirLowerError::TypeError("function call on bad type")),
}
}
Expr::MethodCall { receiver, args, method_name, .. } => {
let (func_id, generic_args) =
- self.infer.method_resolution(expr_id).ok_or_else(|| MirLowerError::UnresolvedMethod(method_name.display(self.db.upcast()).to_string()))?;
+ self.infer.method_resolution(expr_id).ok_or_else(|| {
+ MirLowerError::UnresolvedMethod(
+ method_name.display(self.db.upcast()).to_string(),
+ )
+ })?;
let func = Operand::from_fn(self.db, func_id, generic_args);
self.lower_call_and_args(
func,
@@ -630,23 +656,27 @@ impl<'ctx> MirLowerCtx<'ctx> {
)
}
Expr::Match { expr, arms } => {
- let Some((cond_place, mut current)) = self.lower_expr_as_place(current, *expr, true)?
+ let Some((cond_place, mut current)) =
+ self.lower_expr_as_place(current, *expr, true)?
else {
return Ok(None);
};
let mut end = None;
for MatchArm { pat, guard, expr } in arms.iter() {
- let (then, mut otherwise) = self.pattern_match(
- current,
- None,
- cond_place.clone(),
- *pat,
- )?;
+ let (then, mut otherwise) =
+ self.pattern_match(current, None, cond_place.clone(), *pat)?;
let then = if let &Some(guard) = guard {
let next = self.new_basic_block();
let o = otherwise.get_or_insert_with(|| self.new_basic_block());
if let Some((discr, c)) = self.lower_expr_to_some_operand(guard, then)? {
- self.set_terminator(c, TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) }, expr_id.into());
+ self.set_terminator(
+ c,
+ TerminatorKind::SwitchInt {
+ discr,
+ targets: SwitchTargets::static_if(1, next, *o),
+ },
+ expr_id.into(),
+ );
}
next
} else {
@@ -672,33 +702,53 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Continue { label } => {
let loop_data = match label {
- Some(l) => self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?,
- None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?,
+ Some(l) => {
+ self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?
+ }
+ None => self
+ .current_loop_blocks
+ .as_ref()
+ .ok_or(MirLowerError::ContinueWithoutLoop)?,
};
let begin = loop_data.begin;
current = self.drop_until_scope(loop_data.drop_scope_index, current);
self.set_goto(current, begin, expr_id.into());
Ok(None)
- },
+ }
&Expr::Break { expr, label } => {
if let Some(expr) = expr {
let loop_data = match label {
- Some(l) => self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?,
- None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::BreakWithoutLoop)?,
+ Some(l) => self
+ .labeled_loop_blocks
+ .get(&l)
+ .ok_or(MirLowerError::UnresolvedLabel)?,
+ None => self
+ .current_loop_blocks
+ .as_ref()
+ .ok_or(MirLowerError::BreakWithoutLoop)?,
};
- let Some(c) = self.lower_expr_to_place(expr, loop_data.place.clone(), current)? else {
+ let Some(c) =
+ self.lower_expr_to_place(expr, loop_data.place.clone(), current)?
+ else {
return Ok(None);
};
current = c;
}
let (end, drop_scope) = match label {
Some(l) => {
- let loop_blocks = self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?;
- (loop_blocks.end.expect("We always generate end for labeled loops"), loop_blocks.drop_scope_index)
- },
- None => {
- (self.current_loop_end()?, self.current_loop_blocks.as_ref().unwrap().drop_scope_index)
- },
+ let loop_blocks = self
+ .labeled_loop_blocks
+ .get(&l)
+ .ok_or(MirLowerError::UnresolvedLabel)?;
+ (
+ loop_blocks.end.expect("We always generate end for labeled loops"),
+ loop_blocks.drop_scope_index,
+ )
+ }
+ None => (
+ self.current_loop_end()?,
+ self.current_loop_blocks.as_ref().unwrap().drop_scope_index,
+ ),
};
current = self.drop_until_scope(drop_scope, current);
self.set_goto(current, end, expr_id.into());
@@ -706,7 +756,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Return { expr } => {
if let Some(expr) = expr {
- if let Some(c) = self.lower_expr_to_place(*expr, return_slot().into(), current)? {
+ if let Some(c) =
+ self.lower_expr_to_place(*expr, return_slot().into(), current)?
+ {
current = c;
} else {
return Ok(None);
@@ -719,19 +771,17 @@ impl<'ctx> MirLowerCtx<'ctx> {
Expr::Yield { .. } => not_supported!("yield"),
Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => {
let spread_place = match spread {
- &Some(x) => {
- let Some((p, c)) = self.lower_expr_as_place(current, x, true)? else {
+ &Some(it) => {
+ let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else {
return Ok(None);
};
current = c;
Some(p)
- },
+ }
None => None,
};
- let variant_id = self
- .infer
- .variant_resolution_for_expr(expr_id)
- .ok_or_else(|| match path {
+ let variant_id =
+ self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
None => MirLowerError::RecordLiteralWithoutPath,
})?;
@@ -746,7 +796,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
for RecordLitField { name, expr } in fields.iter() {
let field_id =
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
- let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)?
+ else {
return Ok(None);
};
current = c;
@@ -758,18 +809,23 @@ impl<'ctx> MirLowerCtx<'ctx> {
Rvalue::Aggregate(
AggregateKind::Adt(variant_id, subst),
match spread_place {
- Some(sp) => operands.into_iter().enumerate().map(|(i, x)| {
- match x {
- Some(x) => x,
+ Some(sp) => operands
+ .into_iter()
+ .enumerate()
+ .map(|(i, it)| match it {
+ Some(it) => it,
None => {
- let p = sp.project(ProjectionElem::Field(FieldId {
- parent: variant_id,
- local_id: LocalFieldId::from_raw(RawIdx::from(i as u32)),
- }));
+ let p =
+ sp.project(ProjectionElem::Field(FieldId {
+ parent: variant_id,
+ local_id: LocalFieldId::from_raw(
+ RawIdx::from(i as u32),
+ ),
+ }));
Operand::Copy(p)
- },
- }
- }).collect(),
+ }
+ })
+ .collect(),
None => operands.into_iter().collect::<Option<_>>().ok_or(
MirLowerError::TypeError("missing field in record literal"),
)?,
@@ -785,7 +841,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
let local_id =
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
- let place = place.project(PlaceElem::Field(FieldId { parent: union_id.into(), local_id }));
+ let place = place.project(PlaceElem::Field(FieldId {
+ parent: union_id.into(),
+ local_id,
+ }));
self.lower_expr_to_place(*expr, place, current)
}
}
@@ -795,11 +854,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
Expr::Async { .. } => not_supported!("async block"),
&Expr::Const(id) => {
let subst = self.placeholder_subst();
- self.lower_const(id.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?;
+ self.lower_const(
+ id.into(),
+ current,
+ place,
+ subst,
+ expr_id.into(),
+ self.expr_ty_without_adjust(expr_id),
+ )?;
Ok(Some(current))
- },
+ }
Expr::Cast { expr, type_ref: _ } => {
- let Some((x, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ let Some((it, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
return Ok(None);
};
let source_ty = self.infer[*expr].clone();
@@ -807,7 +873,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(
current,
place,
- Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, x, target_ty),
+ Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, it, target_ty),
expr_id.into(),
);
Ok(Some(current))
@@ -822,23 +888,37 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Box { expr } => {
let ty = self.expr_ty_after_adjustments(*expr);
- self.push_assignment(current, place.clone(), Rvalue::ShallowInitBoxWithAlloc(ty), expr_id.into());
- let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ self.push_assignment(
+ current,
+ place.clone(),
+ Rvalue::ShallowInitBoxWithAlloc(ty),
+ expr_id.into(),
+ );
+ let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)?
+ else {
return Ok(None);
};
let p = place.project(ProjectionElem::Deref);
self.push_assignment(current, p, operand.into(), expr_id.into());
Ok(Some(current))
- },
- Expr::Field { .. } | Expr::Index { .. } | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => {
- let Some((p, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, true)? else {
+ }
+ Expr::Field { .. }
+ | Expr::Index { .. }
+ | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => {
+ let Some((p, current)) =
+ self.lower_expr_as_place_without_adjust(current, expr_id, true)?
+ else {
return Ok(None);
};
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
Ok(Some(current))
}
- Expr::UnaryOp { expr, op: op @ (hir_def::hir::UnaryOp::Not | hir_def::hir::UnaryOp::Neg) } => {
- let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ Expr::UnaryOp {
+ expr,
+ op: op @ (hir_def::hir::UnaryOp::Not | hir_def::hir::UnaryOp::Neg),
+ } => {
+ let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)?
+ else {
return Ok(None);
};
let operation = match op {
@@ -853,7 +933,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
expr_id.into(),
);
Ok(Some(current))
- },
+ }
Expr::BinaryOp { lhs, rhs, op } => {
let op = op.ok_or(MirLowerError::IncompleteExpr)?;
let is_builtin = 'b: {
@@ -861,16 +941,19 @@ impl<'ctx> MirLowerCtx<'ctx> {
// for binary operator, and use without adjust to simplify our conditions.
let lhs_ty = self.expr_ty_without_adjust(*lhs);
let rhs_ty = self.expr_ty_without_adjust(*rhs);
- if matches!(op ,BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. })) {
+ if matches!(op, BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. })) {
if lhs_ty.as_raw_ptr().is_some() && rhs_ty.as_raw_ptr().is_some() {
break 'b true;
}
}
let builtin_inequal_impls = matches!(
op,
- BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) }
+ BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr)
+ | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) }
);
- lhs_ty.is_scalar() && rhs_ty.is_scalar() && (lhs_ty == rhs_ty || builtin_inequal_impls)
+ lhs_ty.is_scalar()
+ && rhs_ty.is_scalar()
+ && (lhs_ty == rhs_ty || builtin_inequal_impls)
};
if !is_builtin {
if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) {
@@ -892,34 +975,34 @@ impl<'ctx> MirLowerCtx<'ctx> {
.infer
.expr_adjustments
.get(lhs)
- .and_then(|x| x.split_last())
- .map(|x| x.1)
- .ok_or(MirLowerError::TypeError("adjustment of binary op was missing"))?;
+ .and_then(|it| it.split_last())
+ .map(|it| it.1)
+ .ok_or(MirLowerError::TypeError(
+ "adjustment of binary op was missing",
+ ))?;
let Some((lhs_place, current)) =
self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)?
else {
return Ok(None);
};
- let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
+ let Some((rhs_op, current)) =
+ self.lower_expr_to_some_operand(*rhs, current)?
+ else {
return Ok(None);
};
- let r_value = Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place.clone()), rhs_op);
+ let r_value = Rvalue::CheckedBinaryOp(
+ op.into(),
+ Operand::Copy(lhs_place.clone()),
+ rhs_op,
+ );
self.push_assignment(current, lhs_place, r_value, expr_id.into());
return Ok(Some(current));
} else {
- let Some((lhs_place, current)) =
- self.lower_expr_as_place(current, *lhs, false)?
- else {
- return Ok(None);
- };
- let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
- return Ok(None);
- };
- self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
- return Ok(Some(current));
+ return self.lower_assignment(current, *lhs, *rhs, expr_id.into());
}
}
- let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else {
+ let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)?
+ else {
return Ok(None);
};
if let hir_def::hir::BinaryOp::LogicOp(op) = op {
@@ -928,22 +1011,31 @@ impl<'ctx> MirLowerCtx<'ctx> {
syntax::ast::LogicOp::Or => 1,
};
let start_of_then = self.new_basic_block();
- self.push_assignment(start_of_then, place.clone(), lhs_op.clone().into(), expr_id.into());
+ self.push_assignment(
+ start_of_then,
+ place.clone(),
+ lhs_op.clone().into(),
+ expr_id.into(),
+ );
let end_of_then = Some(start_of_then);
let start_of_else = self.new_basic_block();
- let end_of_else =
- self.lower_expr_to_place(*rhs, place, start_of_else)?;
+ let end_of_else = self.lower_expr_to_place(*rhs, place, start_of_else)?;
self.set_terminator(
current,
TerminatorKind::SwitchInt {
discr: lhs_op,
- targets: SwitchTargets::static_if(value_to_short, start_of_then, start_of_else),
+ targets: SwitchTargets::static_if(
+ value_to_short,
+ start_of_then,
+ start_of_else,
+ ),
},
expr_id.into(),
);
return Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into()));
}
- let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
+ let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)?
+ else {
return Ok(None);
};
self.push_assignment(
@@ -976,15 +1068,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
let mut lp = None;
let mut rp = None;
- if let Some(x) = lhs {
- let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else {
+ if let Some(it) = lhs {
+ let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else {
return Ok(None);
};
lp = Some(o);
current = c;
}
- if let Some(x) = rhs {
- let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else {
+ if let Some(it) = rhs {
+ let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else {
return Ok(None);
};
rp = Some(o);
@@ -995,20 +1087,28 @@ impl<'ctx> MirLowerCtx<'ctx> {
place,
Rvalue::Aggregate(
AggregateKind::Adt(st.into(), subst.clone()),
- self.db.struct_data(st).variant_data.fields().iter().map(|x| {
- let o = match x.1.name.as_str() {
- Some("start") => lp.take(),
- Some("end") => rp.take(),
- Some("exhausted") => Some(Operand::from_bytes(vec![0], TyBuilder::bool())),
- _ => None,
- };
- o.ok_or(MirLowerError::UnresolvedField)
- }).collect::<Result<_>>()?,
+ self.db
+ .struct_data(st)
+ .variant_data
+ .fields()
+ .iter()
+ .map(|it| {
+ let o = match it.1.name.as_str() {
+ Some("start") => lp.take(),
+ Some("end") => rp.take(),
+ Some("exhausted") => {
+ Some(Operand::from_bytes(vec![0], TyBuilder::bool()))
+ }
+ _ => None,
+ };
+ o.ok_or(MirLowerError::UnresolvedField)
+ })
+ .collect::<Result<_>>()?,
),
expr_id.into(),
);
Ok(Some(current))
- },
+ }
Expr::Closure { .. } => {
let ty = self.expr_ty_without_adjust(expr_id);
let TyKind::Closure(id, _) = ty.kind(Interner) else {
@@ -1020,22 +1120,33 @@ impl<'ctx> MirLowerCtx<'ctx> {
for capture in captures.iter() {
let p = Place {
local: self.binding_local(capture.place.local)?,
- projection: capture.place.projections.clone().into_iter().map(|x| {
- match x {
+ projection: capture
+ .place
+ .projections
+ .clone()
+ .into_iter()
+ .map(|it| match it {
ProjectionElem::Deref => ProjectionElem::Deref,
- ProjectionElem::Field(x) => ProjectionElem::Field(x),
- ProjectionElem::TupleOrClosureField(x) => ProjectionElem::TupleOrClosureField(x),
- ProjectionElem::ConstantIndex { offset, from_end } => ProjectionElem::ConstantIndex { offset, from_end },
- ProjectionElem::Subslice { from, to } => ProjectionElem::Subslice { from, to },
- ProjectionElem::OpaqueCast(x) => ProjectionElem::OpaqueCast(x),
- ProjectionElem::Index(x) => match x { },
- }
- }).collect(),
+ ProjectionElem::Field(it) => ProjectionElem::Field(it),
+ ProjectionElem::TupleOrClosureField(it) => {
+ ProjectionElem::TupleOrClosureField(it)
+ }
+ ProjectionElem::ConstantIndex { offset, from_end } => {
+ ProjectionElem::ConstantIndex { offset, from_end }
+ }
+ ProjectionElem::Subslice { from, to } => {
+ ProjectionElem::Subslice { from, to }
+ }
+ ProjectionElem::OpaqueCast(it) => ProjectionElem::OpaqueCast(it),
+ ProjectionElem::Index(it) => match it {},
+ })
+ .collect(),
};
match &capture.kind {
CaptureKind::ByRef(bk) => {
let placeholder_subst = self.placeholder_subst();
- let tmp_ty = capture.ty.clone().substitute(Interner, &placeholder_subst);
+ let tmp_ty =
+ capture.ty.clone().substitute(Interner, &placeholder_subst);
let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
self.push_assignment(
current,
@@ -1044,7 +1155,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
capture.span,
);
operands.push(Operand::Move(tmp));
- },
+ }
CaptureKind::ByValue => operands.push(Operand::Move(p)),
}
}
@@ -1055,18 +1166,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
expr_id.into(),
);
Ok(Some(current))
- },
+ }
Expr::Tuple { exprs, is_assignee_expr: _ } => {
let Some(values) = exprs
- .iter()
- .map(|x| {
- let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
- return Ok(None);
- };
- current = c;
- Ok(Some(o))
- })
- .collect::<Result<Option<_>>>()?
+ .iter()
+ .map(|it| {
+ let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ Ok(Some(o))
+ })
+ .collect::<Result<Option<_>>>()?
else {
return Ok(None);
};
@@ -1079,7 +1190,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
Expr::Array(l) => match l {
Array::ElementList { elements, .. } => {
- let elem_ty = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind {
+ let elem_ty = match &self.expr_ty_without_adjust(expr_id).kind(Interner) {
TyKind::Array(ty, _) => ty.clone(),
_ => {
return Err(MirLowerError::TypeError(
@@ -1088,30 +1199,29 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
};
let Some(values) = elements
- .iter()
- .map(|x| {
- let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
- return Ok(None);
- };
- current = c;
- Ok(Some(o))
- })
- .collect::<Result<Option<_>>>()?
+ .iter()
+ .map(|it| {
+ let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ Ok(Some(o))
+ })
+ .collect::<Result<Option<_>>>()?
else {
return Ok(None);
};
- let r = Rvalue::Aggregate(
- AggregateKind::Array(elem_ty),
- values,
- );
+ let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty), values);
self.push_assignment(current, place, r, expr_id.into());
Ok(Some(current))
}
Array::Repeat { initializer, .. } => {
- let Some((init, current)) = self.lower_expr_to_some_operand(*initializer, current)? else {
+ let Some((init, current)) =
+ self.lower_expr_to_some_operand(*initializer, current)?
+ else {
return Ok(None);
};
- let len = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind {
+ let len = match &self.expr_ty_without_adjust(expr_id).kind(Interner) {
TyKind::Array(_, len) => len.clone(),
_ => {
return Err(MirLowerError::TypeError(
@@ -1122,7 +1232,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let r = Rvalue::Repeat(init, len);
self.push_assignment(current, place, r, expr_id.into());
Ok(Some(current))
- },
+ }
},
Expr::Literal(l) => {
let ty = self.expr_ty_without_adjust(expr_id);
@@ -1134,9 +1244,33 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
+ fn lower_assignment(
+ &mut self,
+ current: BasicBlockId,
+ lhs: ExprId,
+ rhs: ExprId,
+ span: MirSpan,
+ ) -> Result<Option<BasicBlockId>> {
+ let Some((rhs_op, current)) =
+ self.lower_expr_to_some_operand(rhs, current)?
+ else {
+ return Ok(None);
+ };
+ if matches!(&self.body.exprs[lhs], Expr::Underscore) {
+ return Ok(Some(current));
+ }
+ let Some((lhs_place, current)) =
+ self.lower_expr_as_place(current, lhs, false)?
+ else {
+ return Ok(None);
+ };
+ self.push_assignment(current, lhs_place, rhs_op.into(), span);
+ Ok(Some(current))
+ }
+
fn placeholder_subst(&mut self) -> Substitution {
let placeholder_subst = match self.owner.as_generic_def_id() {
- Some(x) => TyBuilder::placeholder_subst(self.db, x),
+ Some(it) => TyBuilder::placeholder_subst(self.db, it),
None => Substitution::empty(Interner),
};
placeholder_subst
@@ -1192,7 +1326,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
let size = self
.db
- .layout_of_ty(ty.clone(), self.owner.module(self.db.upcast()).krate())?
+ .layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))?
.size
.bytes_usize();
let bytes = match l {
@@ -1206,7 +1340,6 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(Operand::from_concrete_const(data, mm, ty));
}
hir_def::hir::Literal::CString(b) => {
- let b = b.as_bytes();
let bytes = b.iter().copied().chain(iter::once(0)).collect::<Vec<_>>();
let mut data = Vec::with_capacity(mem::size_of::<usize>() * 2);
@@ -1226,8 +1359,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(),
hir_def::hir::Literal::Bool(b) => vec![*b as u8],
- hir_def::hir::Literal::Int(x, _) => x.to_le_bytes()[0..size].into(),
- hir_def::hir::Literal::Uint(x, _) => x.to_le_bytes()[0..size].into(),
+ hir_def::hir::Literal::Int(it, _) => it.to_le_bytes()[0..size].into(),
+ hir_def::hir::Literal::Uint(it, _) => it.to_le_bytes()[0..size].into(),
hir_def::hir::Literal::Float(f, _) => match size {
8 => f.into_f64().to_le_bytes().into(),
4 => f.into_f32().to_le_bytes().into(),
@@ -1269,7 +1402,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
} else {
let name = const_id.name(self.db.upcast());
self.db
- .const_eval(const_id.into(), subst)
+ .const_eval(const_id.into(), subst, None)
.map_err(|e| MirLowerError::ConstEvalError(name, Box::new(e)))?
};
Ok(Operand::Constant(c))
@@ -1377,9 +1510,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
let mut ty = None;
- if let Some(x) = self.infer.expr_adjustments.get(&e) {
- if let Some(x) = x.last() {
- ty = Some(x.target.clone());
+ if let Some(it) = self.infer.expr_adjustments.get(&e) {
+ if let Some(it) = it.last() {
+ ty = Some(it.target.clone());
}
}
ty.unwrap_or_else(|| self.expr_ty_without_adjust(e))
@@ -1401,7 +1534,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn discr_temp_place(&mut self, current: BasicBlockId) -> Place {
match &self.discr_temp {
- Some(x) => x.clone(),
+ Some(it) => it.clone(),
None => {
let tmp: Place = self
.temp(TyBuilder::discr_ty(), current, MirSpan::Unknown)
@@ -1448,7 +1581,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn has_adjustments(&self, expr_id: ExprId) -> bool {
- !self.infer.expr_adjustments.get(&expr_id).map(|x| x.is_empty()).unwrap_or(true)
+ !self.infer.expr_adjustments.get(&expr_id).map(|it| it.is_empty()).unwrap_or(true)
}
fn merge_blocks(
@@ -1478,7 +1611,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
))?
.end
{
- Some(x) => x,
+ Some(it) => it,
None => {
let s = self.new_basic_block();
self.current_loop_blocks
@@ -1602,10 +1735,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
pick_binding: impl Fn(BindingId) -> bool,
) -> Result<BasicBlockId> {
let base_param_count = self.result.param_locals.len();
- self.result.param_locals.extend(params.clone().map(|(x, ty)| {
+ self.result.param_locals.extend(params.clone().map(|(it, ty)| {
let local_id = self.result.locals.alloc(Local { ty });
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
- if let Pat::Bind { id, subpat: None } = self.body[x] {
+ if let Pat::Bind { id, subpat: None } = self.body[it] {
if matches!(
self.body.bindings[id].mode,
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
@@ -1646,7 +1779,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn binding_local(&self, b: BindingId) -> Result<LocalId> {
match self.result.binding_locals.get(b) {
- Some(x) => Ok(*x),
+ Some(it) => Ok(*it),
None => {
// FIXME: It should never happen, but currently it will happen in `const_dependent_on_local` test, which
// is a hir lowering problem IMO.
@@ -1731,6 +1864,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) {
+ (TyKind::FnDef(..), TyKind::Function(_)) => CastKind::Pointer(PointerCast::ReifyFnPointer),
(TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) {
(chalk_ir::Scalar::Float(_), chalk_ir::Scalar::Float(_)) => CastKind::FloatToFloat,
(chalk_ir::Scalar::Float(_), _) => CastKind::FloatToInt,
@@ -1742,17 +1876,17 @@ fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
(TyKind::Raw(_, a) | TyKind::Ref(_, _, a), TyKind::Raw(_, b) | TyKind::Ref(_, _, b)) => {
CastKind::Pointer(if a == b {
PointerCast::MutToConstPointer
- } else if matches!(a.kind(Interner), TyKind::Slice(_) | TyKind::Str)
- && matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Str)
+ } else if matches!(b.kind(Interner), TyKind::Slice(_))
+ && matches!(a.kind(Interner), TyKind::Array(_, _))
+ || matches!(b.kind(Interner), TyKind::Dyn(_))
{
- // slice to slice cast is no-op (metadata is not touched), so we use this
- PointerCast::MutToConstPointer
- } else if matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) {
PointerCast::Unsize
} else if matches!(a.kind(Interner), TyKind::Slice(s) if s == b) {
PointerCast::ArrayToPointer
} else {
- // cast between two sized pointer, like *const i32 to *const i8. There is no specific variant
+ // cast between two sized pointers, like *const i32 to *const i8, or two unsized pointers, like
+ // slice to slice or slice to str. These are no-ops (even in the unsized case, no metadata
+ // will be touched), but there is no specific variant
// for it in `PointerCast` so we use `MutToConstPointer`
PointerCast::MutToConstPointer
})
@@ -1796,7 +1930,7 @@ pub fn mir_body_for_closure_query(
implementation_error!("closure has not callable sig");
};
let current = ctx.lower_params_and_bindings(
- args.iter().zip(sig.params().iter()).map(|(x, y)| (*x, y.clone())),
+ args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())),
|_| true,
)?;
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
@@ -1815,34 +1949,35 @@ pub fn mir_body_for_closure_query(
FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref],
};
ctx.result.walk_places(|p| {
- if let Some(x) = upvar_map.get(&p.local) {
- let r = x.iter().find(|x| {
- if p.projection.len() < x.0.place.projections.len() {
+ if let Some(it) = upvar_map.get(&p.local) {
+ let r = it.iter().find(|it| {
+ if p.projection.len() < it.0.place.projections.len() {
return false;
}
- for (x, y) in p.projection.iter().zip(x.0.place.projections.iter()) {
- match (x, y) {
+ for (it, y) in p.projection.iter().zip(it.0.place.projections.iter()) {
+ match (it, y) {
(ProjectionElem::Deref, ProjectionElem::Deref) => (),
- (ProjectionElem::Field(x), ProjectionElem::Field(y)) if x == y => (),
+ (ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (),
(
- ProjectionElem::TupleOrClosureField(x),
+ ProjectionElem::TupleOrClosureField(it),
ProjectionElem::TupleOrClosureField(y),
- ) if x == y => (),
+ ) if it == y => (),
_ => return false,
}
}
true
});
match r {
- Some(x) => {
+ Some(it) => {
p.local = closure_local;
let mut next_projs = closure_projection.clone();
- next_projs.push(PlaceElem::TupleOrClosureField(x.1));
+ next_projs.push(PlaceElem::TupleOrClosureField(it.1));
let prev_projs = mem::take(&mut p.projection);
- if x.0.kind != CaptureKind::ByValue {
+ if it.0.kind != CaptureKind::ByValue {
next_projs.push(ProjectionElem::Deref);
}
- next_projs.extend(prev_projs.iter().cloned().skip(x.0.place.projections.len()));
+ next_projs
+ .extend(prev_projs.iter().cloned().skip(it.0.place.projections.len()));
p.projection = next_projs.into();
}
None => err = Some(p.clone()),
@@ -1902,8 +2037,8 @@ pub fn lower_to_mir(
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<MirBody> {
- if let Some((_, x)) = infer.type_mismatches().next() {
- return Err(MirLowerError::TypeMismatch(x.clone()));
+ if let Some((_, it)) = infer.type_mismatches().next() {
+ return Err(MirLowerError::TypeMismatch(it.clone()));
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
// 0 is return local
@@ -1929,7 +2064,7 @@ pub fn lower_to_mir(
body.params
.iter()
.zip(callable_sig.params().iter())
- .map(|(x, y)| (*x, y.clone())),
+ .map(|(it, y)| (*it, y.clone())),
binding_picker,
)?;
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
index d2c8d9a08..213f151ab 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
@@ -1,12 +1,12 @@
//! MIR lowering for places
use super::*;
-use hir_def::{lang_item::lang_attr, FunctionId};
+use hir_def::FunctionId;
use hir_expand::name;
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirLowerError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirLowerError::NotSupported(format!($it)))
};
}
@@ -18,7 +18,9 @@ impl MirLowerCtx<'_> {
) -> Result<Option<(Place, BasicBlockId)>> {
let ty = self.expr_ty_without_adjust(expr_id);
let place = self.temp(ty, prev_block, expr_id.into())?;
- let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
+ let Some(current) =
+ self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)?
+ else {
return Ok(None);
};
Ok(Some((place.into(), current)))
@@ -32,10 +34,12 @@ impl MirLowerCtx<'_> {
) -> Result<Option<(Place, BasicBlockId)>> {
let ty = adjustments
.last()
- .map(|x| x.target.clone())
+ .map(|it| it.target.clone())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id));
let place = self.temp(ty, prev_block, expr_id.into())?;
- let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
+ let Some(current) =
+ self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)?
+ else {
return Ok(None);
};
Ok(Some((place.into(), current)))
@@ -57,16 +61,17 @@ impl MirLowerCtx<'_> {
if let Some((last, rest)) = adjustments.split_last() {
match last.kind {
Adjust::Deref(None) => {
- let Some(mut x) = self.lower_expr_as_place_with_adjust(
+ let Some(mut it) = self.lower_expr_as_place_with_adjust(
current,
expr_id,
upgrade_rvalue,
rest,
- )? else {
+ )?
+ else {
return Ok(None);
};
- x.0 = x.0.project(ProjectionElem::Deref);
- Ok(Some(x))
+ it.0 = it.0.project(ProjectionElem::Deref);
+ Ok(Some(it))
}
Adjust::Deref(Some(od)) => {
let Some((r, current)) = self.lower_expr_as_place_with_adjust(
@@ -74,14 +79,15 @@ impl MirLowerCtx<'_> {
expr_id,
upgrade_rvalue,
rest,
- )? else {
+ )?
+ else {
return Ok(None);
};
self.lower_overloaded_deref(
current,
r,
rest.last()
- .map(|x| x.target.clone())
+ .map(|it| it.target.clone())
.unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)),
last.target.clone(),
expr_id.into(),
@@ -156,7 +162,7 @@ impl MirLowerCtx<'_> {
let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) {
TyKind::Ref(..) | TyKind::Raw(..) => true,
TyKind::Adt(id, _) => {
- if let Some(lang_item) = lang_attr(self.db.upcast(), id.0) {
+ if let Some(lang_item) = self.db.lang_attr(id.0.into()) {
lang_item == LangItem::OwnedBox
} else {
false
@@ -165,7 +171,8 @@ impl MirLowerCtx<'_> {
_ => false,
};
if !is_builtin {
- let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)?
+ else {
return Ok(None);
};
return self.lower_overloaded_deref(
@@ -192,7 +199,8 @@ impl MirLowerCtx<'_> {
},
);
}
- let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)?
+ else {
return Ok(None);
};
r = r.project(ProjectionElem::Deref);
@@ -217,12 +225,18 @@ impl MirLowerCtx<'_> {
)
{
let Some(index_fn) = self.infer.method_resolution(expr_id) else {
- return Err(MirLowerError::UnresolvedMethod("[overloaded index]".to_string()));
+ return Err(MirLowerError::UnresolvedMethod(
+ "[overloaded index]".to_string(),
+ ));
};
- let Some((base_place, current)) = self.lower_expr_as_place(current, *base, true)? else {
+ let Some((base_place, current)) =
+ self.lower_expr_as_place(current, *base, true)?
+ else {
return Ok(None);
};
- let Some((index_operand, current)) = self.lower_expr_to_some_operand(*index, current)? else {
+ let Some((index_operand, current)) =
+ self.lower_expr_to_some_operand(*index, current)?
+ else {
return Ok(None);
};
return self.lower_overloaded_index(
@@ -239,8 +253,8 @@ impl MirLowerCtx<'_> {
.infer
.expr_adjustments
.get(base)
- .and_then(|x| x.split_last())
- .map(|x| x.1)
+ .and_then(|it| it.split_last())
+ .map(|it| it.1)
.unwrap_or(&[]);
let Some((mut p_base, current)) =
self.lower_expr_as_place_with_adjust(current, *base, true, adjusts)?
@@ -249,7 +263,8 @@ impl MirLowerCtx<'_> {
};
let l_index =
self.temp(self.expr_ty_after_adjustments(*index), current, expr_id.into())?;
- let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else {
+ let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)?
+ else {
return Ok(None);
};
p_base = p_base.project(ProjectionElem::Index(l_index));
@@ -282,7 +297,15 @@ impl MirLowerCtx<'_> {
)
.intern(Interner),
);
- let Some(current) = self.lower_call(index_fn_op, Box::new([Operand::Copy(place), index_operand]), result.clone(), current, false, span)? else {
+ let Some(current) = self.lower_call(
+ index_fn_op,
+ Box::new([Operand::Copy(place), index_operand]),
+ result.clone(),
+ current,
+ false,
+ span,
+ )?
+ else {
return Ok(None);
};
result = result.project(ProjectionElem::Deref);
@@ -329,7 +352,15 @@ impl MirLowerCtx<'_> {
.intern(Interner),
);
let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
- let Some(current) = self.lower_call(deref_fn_op, Box::new([Operand::Copy(ref_place)]), result.clone(), current, false, span)? else {
+ let Some(current) = self.lower_call(
+ deref_fn_op,
+ Box::new([Operand::Copy(ref_place)]),
+ result.clone(),
+ current,
+ false,
+ span,
+ )?
+ else {
return Ok(None);
};
result = result.project(ProjectionElem::Deref);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
index ff43c64a9..3354cbd76 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -307,6 +307,11 @@ impl MirLowerCtx<'_> {
mode,
)?,
None => {
+ // The path is not a variant, so it is a const
+ if mode != MatchingMode::Check {
+ // A const doesn't bind anything; it only needs checking.
+ return Ok((current, current_else));
+ }
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let resolver = self.owner.resolver(self.db.upcast());
let pr = resolver
@@ -362,8 +367,8 @@ impl MirLowerCtx<'_> {
},
Pat::Lit(l) => match &self.body.exprs[*l] {
Expr::Literal(l) => {
- let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
if mode == MatchingMode::Check {
+ let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
self.pattern_match_const(current_else, current, c, cond_place, pattern)?
} else {
(current, current_else)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
index ce3f7a8e5..c565228d9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
@@ -13,15 +13,14 @@ use chalk_ir::{
fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
ConstData, DebruijnIndex,
};
-use hir_def::{DefWithBodyId, GeneralConstId};
+use hir_def::DefWithBodyId;
use triomphe::Arc;
use crate::{
- consteval::unknown_const,
+ consteval::{intern_const_scalar, unknown_const},
db::HirDatabase,
from_placeholder_idx,
infer::normalize,
- method_resolution::lookup_impl_const,
utils::{generics, Generics},
ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
};
@@ -29,8 +28,8 @@ use crate::{
use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind};
macro_rules! not_supported {
- ($x: expr) => {
- return Err(MirLowerError::NotSupported(format!($x)))
+ ($it: expr) => {
+ return Err(MirLowerError::NotSupported(format!($it)))
};
}
@@ -97,16 +96,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> {
- let x = from_placeholder_idx(self.db, idx);
- let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
+ let it = from_placeholder_idx(self.db, idx);
+ let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
- .and_then(|x| x.constant(Interner))
- .ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
+ .and_then(|it| it.constant(Interner))
+ .ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
.clone())
}
@@ -115,16 +114,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
idx: chalk_ir::PlaceholderIndex,
_outer_binder: DebruijnIndex,
) -> std::result::Result<Ty, Self::Error> {
- let x = from_placeholder_idx(self.db, idx);
- let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else {
+ let it = from_placeholder_idx(self.db, idx);
+ let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else {
not_supported!("missing idx in generics");
};
Ok(self
.subst
.as_slice(Interner)
.get(idx)
- .and_then(|x| x.ty(Interner))
- .ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))?
+ .and_then(|it| it.ty(Interner))
+ .ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))?
.clone())
}
@@ -180,7 +179,7 @@ impl Filler<'_> {
MirLowerError::GenericArgNotProvided(
self.generics
.as_ref()
- .and_then(|x| x.iter().nth(b.index))
+ .and_then(|it| it.iter().nth(b.index))
.unwrap()
.0,
self.subst.clone(),
@@ -193,25 +192,12 @@ impl Filler<'_> {
| chalk_ir::ConstValue::Placeholder(_) => {}
chalk_ir::ConstValue::Concrete(cc) => match &cc.interned {
crate::ConstScalar::UnevaluatedConst(const_id, subst) => {
- let mut const_id = *const_id;
let mut subst = subst.clone();
self.fill_subst(&mut subst)?;
- if let GeneralConstId::ConstId(c) = const_id {
- let (c, s) = lookup_impl_const(
- self.db,
- self.db.trait_environment_for_body(self.owner),
- c,
- subst,
- );
- const_id = GeneralConstId::ConstId(c);
- subst = s;
- }
- let result =
- self.db.const_eval(const_id.into(), subst).map_err(|e| {
- let name = const_id.name(self.db.upcast());
- MirLowerError::ConstEvalError(name, Box::new(e))
- })?;
- *c = result;
+ *c = intern_const_scalar(
+ crate::ConstScalar::UnevaluatedConst(*const_id, subst),
+ c.data(Interner).ty.clone(),
+ );
}
crate::ConstScalar::Bytes(_, _) | crate::ConstScalar::Unknown => (),
},
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
index ac23e77bd..781ffaeca 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
@@ -135,7 +135,7 @@ impl<'a> MirPrettyCtx<'a> {
fn for_closure(&mut self, closure: ClosureId) {
let body = match self.db.mir_body_for_closure(closure) {
- Ok(x) => x,
+ Ok(it) => it,
Err(e) => {
wln!(self, "// error in {closure:?}: {e:?}");
return;
@@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> {
let indent = mem::take(&mut self.indent);
let mut ctx = MirPrettyCtx {
body: &body,
- local_to_binding: body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(),
+ local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(),
result,
indent,
..*self
@@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> {
}
fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
- let local_to_binding = body.binding_locals.iter().map(|(x, y)| (*y, x)).collect();
+ let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect();
MirPrettyCtx {
body,
db,
@@ -315,17 +315,17 @@ impl<'a> MirPrettyCtx<'a> {
}
}
}
- ProjectionElem::TupleOrClosureField(x) => {
+ ProjectionElem::TupleOrClosureField(it) => {
f(this, local, head);
- w!(this, ".{}", x);
+ w!(this, ".{}", it);
}
ProjectionElem::Index(l) => {
f(this, local, head);
w!(this, "[{}]", this.local_name(*l).display(this.db));
}
- x => {
+ it => {
f(this, local, head);
- w!(this, ".{:?}", x);
+ w!(this, ".{:?}", it);
}
}
}
@@ -356,14 +356,14 @@ impl<'a> MirPrettyCtx<'a> {
}
self.place(p);
}
- Rvalue::Aggregate(AggregateKind::Tuple(_), x) => {
+ Rvalue::Aggregate(AggregateKind::Tuple(_), it) => {
w!(self, "(");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, ")");
}
- Rvalue::Aggregate(AggregateKind::Array(_), x) => {
+ Rvalue::Aggregate(AggregateKind::Array(_), it) => {
w!(self, "[");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, "]");
}
Rvalue::Repeat(op, len) => {
@@ -371,19 +371,19 @@ impl<'a> MirPrettyCtx<'a> {
self.operand(op);
w!(self, "; {}]", len.display(self.db));
}
- Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => {
+ Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
w!(self, "Adt(");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, ")");
}
- Rvalue::Aggregate(AggregateKind::Closure(_), x) => {
+ Rvalue::Aggregate(AggregateKind::Closure(_), it) => {
w!(self, "Closure(");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, ")");
}
- Rvalue::Aggregate(AggregateKind::Union(_, _), x) => {
+ Rvalue::Aggregate(AggregateKind::Union(_, _), it) => {
w!(self, "Union(");
- self.operand_list(x);
+ self.operand_list(it);
w!(self, ")");
}
Rvalue::Len(p) => {
@@ -428,8 +428,8 @@ impl<'a> MirPrettyCtx<'a> {
}
}
- fn operand_list(&mut self, x: &[Operand]) {
- let mut it = x.iter();
+ fn operand_list(&mut self, it: &[Operand]) {
+ let mut it = it.iter();
if let Some(first) = it.next() {
self.operand(first);
for op in it {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index 857141280..d22d0d85c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -30,7 +30,7 @@ use syntax::{
ast::{self, AstNode, HasName},
SyntaxNode,
};
-use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
+use tracing_subscriber::{layer::SubscriberExt, Registry};
use tracing_tree::HierarchicalLayer;
use triomphe::Arc;
@@ -52,7 +52,8 @@ fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
return None;
}
- let filter = EnvFilter::from_env("CHALK_DEBUG");
+ let filter: tracing_subscriber::filter::Targets =
+ env::var("CHALK_DEBUG").ok().and_then(|it| it.parse().ok()).unwrap_or_default();
let layer = HierarchicalLayer::default()
.with_indent_lines(true)
.with_ansi(false)
@@ -205,7 +206,9 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
let Some(node) = (match expr_or_pat {
hir_def::hir::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
hir_def::hir::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
- }) else { continue; };
+ }) else {
+ continue;
+ };
let range = node.as_ref().original_file_range(&db);
let actual = format!(
"expected {}, got {}",
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
index 425432479..e75b037e3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/display_source_code.rs
@@ -227,3 +227,22 @@ fn f(a: impl Foo<i8, Assoc<i16> = i32>) {
"#,
);
}
+
+#[test]
+fn fn_def_is_shown_as_fn_ptr() {
+ check_types_source_code(
+ r#"
+fn foo(_: i32) -> i64 { 42 }
+struct S<T>(T);
+enum E { A(usize) }
+fn test() {
+ let f = foo;
+ //^ fn(i32) -> i64
+ let f = S::<i8>;
+ //^ fn(i8) -> S<i8>
+ let f = E::A;
+ //^ fn(usize) -> E
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
index 111ac0b61..1e6e946a1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs
@@ -202,13 +202,15 @@ fn expr_macro_def_expanded_in_various_places() {
100..119 'for _ ...!() {}': IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
100..119 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
- 100..119 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
+ 100..119 'for _ ...!() {}': Option<IntoIterator::Item<isize>>
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
- 104..105 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
+ 104..105 '_': IntoIterator::Item<isize>
117..119 '{}': ()
124..134 '|| spam!()': impl Fn() -> isize
+ 140..156 'while ...!() {}': !
+ 140..156 'while ...!() {}': ()
140..156 'while ...!() {}': ()
154..156 '{}': ()
161..174 'break spam!()': !
@@ -293,13 +295,15 @@ fn expr_macro_rules_expanded_in_various_places() {
114..133 'for _ ...!() {}': IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize>
114..133 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item>
- 114..133 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>>
+ 114..133 'for _ ...!() {}': Option<IntoIterator::Item<isize>>
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
- 118..119 '_': Iterator::Item<IntoIterator::IntoIter<isize>>
+ 118..119 '_': IntoIterator::Item<isize>
131..133 '{}': ()
138..148 '|| spam!()': impl Fn() -> isize
+ 154..170 'while ...!() {}': !
+ 154..170 'while ...!() {}': ()
154..170 'while ...!() {}': ()
168..170 '{}': ()
175..188 'break spam!()': !
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
index 1e57a4ae2..c837fae3f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
@@ -1216,6 +1216,73 @@ fn main() {
}
#[test]
+fn inherent_method_deref_raw() {
+ check_types(
+ r#"
+struct Val;
+
+impl Val {
+ pub fn method(self: *const Val) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let foo: *const Val;
+ foo.method();
+ // ^^^^^^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn inherent_method_ref_self_deref_raw() {
+ check_types(
+ r#"
+struct Val;
+
+impl Val {
+ pub fn method(&self) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let foo: *const Val;
+ foo.method();
+ // ^^^^^^^^^^^^ {unknown}
+}
+"#,
+ );
+}
+
+#[test]
+fn trait_method_deref_raw() {
+ check_types(
+ r#"
+trait Trait {
+ fn method(self: *const Self) -> u32;
+}
+
+struct Val;
+
+impl Trait for Val {
+ fn method(self: *const Self) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let foo: *const Val;
+ foo.method();
+ // ^^^^^^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
fn method_on_dyn_impl() {
check_types(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
index 59046c043..5d809b823 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs
@@ -412,17 +412,23 @@ fn diverging_expression_3_break() {
355..654 '{ ...; }; }': ()
398..399 'x': u32
407..433 '{ whil...; }; }': u32
+ 409..430 'while ...eak; }': !
+ 409..430 'while ...eak; }': ()
409..430 'while ...eak; }': ()
415..419 'true': bool
420..430 '{ break; }': ()
422..427 'break': !
537..538 'x': u32
546..564 '{ whil... {}; }': u32
+ 548..561 'while true {}': !
+ 548..561 'while true {}': ()
548..561 'while true {}': ()
554..558 'true': bool
559..561 '{}': ()
615..616 'x': u32
624..651 '{ whil...; }; }': u32
+ 626..648 'while ...urn; }': !
+ 626..648 'while ...urn; }': ()
626..648 'while ...urn; }': ()
632..636 'true': bool
637..648 '{ return; }': ()
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index 047900a32..6ea059065 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -1240,11 +1240,11 @@ fn test() {
16..66 'for _ ... }': IntoIterator::IntoIter<()>
16..66 'for _ ... }': &mut IntoIterator::IntoIter<()>
16..66 'for _ ... }': fn next<IntoIterator::IntoIter<()>>(&mut IntoIterator::IntoIter<()>) -> Option<<IntoIterator::IntoIter<()> as Iterator>::Item>
- 16..66 'for _ ... }': Option<Iterator::Item<IntoIterator::IntoIter<()>>>
+ 16..66 'for _ ... }': Option<IntoIterator::Item<()>>
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
- 20..21 '_': Iterator::Item<IntoIterator::IntoIter<()>>
+ 20..21 '_': IntoIterator::Item<()>
25..39 '{ let x = 0; }': ()
31..32 'x': i32
35..36 '0': i32
@@ -1267,6 +1267,8 @@ fn test() {
"#,
expect![[r#"
10..59 '{ ... } }': ()
+ 16..57 'while ... }': !
+ 16..57 'while ... }': ()
16..57 'while ... }': ()
22..30 '{ true }': bool
24..28 'true': bool
@@ -1978,3 +1980,23 @@ fn x(a: [i32; 4]) {
"#,
);
}
+
+#[test]
+fn dont_unify_on_casts() {
+ // #15246
+ check_types(
+ r#"
+fn unify(_: [bool; 1]) {}
+fn casted(_: *const bool) {}
+fn default<T>() -> T { loop {} }
+
+fn test() {
+ let foo = default();
+ //^^^ [bool; 1]
+
+ casted(&foo as *const _);
+ unify(foo);
+}
+"#,
+ );
+}
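
The `dont_unify_on_casts` regression test (issue #15246) pins down that an `as` cast must not feed its target type back into inference of the cast's source: `foo` has to be inferred as `[bool; 1]` from `unify(foo)`, not from the `*const bool` cast. Plain rustc accepts the same shape; the sketch below adds an explicit type assertion (`assert_type` is an illustrative helper, not part of the fixture).

```rust
fn unify(_: [bool; 1]) {}
fn casted(_: *const bool) {}
fn default<T>() -> T {
    loop {}
}

// Fails to compile unless the turbofished type matches the inferred one.
fn assert_type<T>(_: &T) {}

fn test() {
    let foo = default();
    // The cast coerces `&foo` to a raw pointer; it must not force `foo` itself
    // to unify with `bool` or `*const bool`.
    casted(&foo as *const _);
    unify(foo);
    assert_type::<[bool; 1]>(&foo);
}
```
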
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index a0ff62843..2ad7946c8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -3513,7 +3513,6 @@ fn func() {
);
}
-// FIXME
#[test]
fn castable_to() {
check_infer(
@@ -3538,10 +3537,10 @@ fn func() {
120..122 '{}': ()
138..184 '{ ...0]>; }': ()
148..149 'x': Box<[i32; 0]>
- 152..160 'Box::new': fn new<[{unknown}; 0]>([{unknown}; 0]) -> Box<[{unknown}; 0]>
- 152..164 'Box::new([])': Box<[{unknown}; 0]>
+ 152..160 'Box::new': fn new<[i32; 0]>([i32; 0]) -> Box<[i32; 0]>
+ 152..164 'Box::new([])': Box<[i32; 0]>
152..181 'Box::n...2; 0]>': Box<[i32; 0]>
- 161..163 '[]': [{unknown}; 0]
+ 161..163 '[]': [i32; 0]
"#]],
);
}
@@ -3578,6 +3577,21 @@ fn f<T>(t: Ark<T>) {
}
#[test]
+fn ref_to_array_to_ptr_cast() {
+ check_types(
+ r#"
+fn default<T>() -> T { loop {} }
+fn foo() {
+ let arr = [default()];
+ //^^^ [i32; 1]
+ let ref_to_arr = &arr;
+ let casted = ref_to_arr as *const i32;
+}
+"#,
+ );
+}
+
+#[test]
fn const_dependent_on_local() {
check_types(
r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index 97ae732a9..542df8b34 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -4149,6 +4149,30 @@ where
}
#[test]
+fn gats_in_bounds_for_assoc() {
+ check_types(
+ r#"
+trait Trait {
+ type Assoc: Another<Gat<i32> = usize>;
+ type Assoc2<T>: Another<Gat<T> = T>;
+}
+trait Another {
+ type Gat<T>;
+ fn foo(&self) -> Self::Gat<i32>;
+ fn bar<T>(&self) -> Self::Gat<T>;
+}
+
+fn test<T: Trait>(a: T::Assoc, b: T::Assoc2<isize>) {
+ let v = a.foo();
+ //^ usize
+ let v = b.bar::<isize>();
+ //^ isize
+}
+"#,
+ );
+}
+
+#[test]
fn bin_op_with_scalar_fallback() {
// Extra impls are significant so that chalk doesn't give us definite guidance.
check_types(
@@ -4410,3 +4434,47 @@ fn test(v: S<i32>) {
"#,
);
}
+
+#[test]
+fn associated_type_in_argument() {
+ check(
+ r#"
+ trait A {
+ fn m(&self) -> i32;
+ }
+
+ fn x<T: B>(k: &<T as B>::Ty) {
+ k.m();
+ }
+
+ struct X;
+ struct Y;
+
+ impl A for X {
+ fn m(&self) -> i32 {
+ 8
+ }
+ }
+
+ impl A for Y {
+ fn m(&self) -> i32 {
+ 32
+ }
+ }
+
+ trait B {
+ type Ty: A;
+ }
+
+ impl B for u16 {
+ type Ty = X;
+ }
+
+ fn ttt() {
+ let inp = Y;
+ x::<u16>(&inp);
+ //^^^^ expected &X, got &Y
+ }
+ "#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
index f40b7db3a..3c7cfbaed 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -170,7 +170,7 @@ fn solve(
struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, ChalkContext<'a>>);
-impl<'a> Drop for LoggingRustIrDatabaseLoggingOnDrop<'a> {
+impl Drop for LoggingRustIrDatabaseLoggingOnDrop<'_> {
fn drop(&mut self) {
eprintln!("chalk program:\n{}", self.0);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index 363658063..75b8b9afa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -28,14 +28,15 @@ use intern::Interned;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
use stdx::never;
+use triomphe::Arc;
use crate::{
consteval::unknown_const,
db::HirDatabase,
layout::{Layout, TagEncoding},
mir::pad16,
- ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt,
- Ty, WhereClause,
+ ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitEnvironment,
+ TraitRef, TraitRefExt, Ty, WhereClause,
};
pub(crate) fn fn_traits(
@@ -89,7 +90,7 @@ struct SuperTraits<'a> {
seen: FxHashSet<ChalkTraitId>,
}
-impl<'a> SuperTraits<'a> {
+impl SuperTraits<'_> {
fn elaborate(&mut self, trait_ref: &TraitRef) {
direct_super_trait_refs(self.db, trait_ref, |trait_ref| {
if !self.seen.contains(&trait_ref.trait_id) {
@@ -99,7 +100,7 @@ impl<'a> SuperTraits<'a> {
}
}
-impl<'a> Iterator for SuperTraits<'a> {
+impl Iterator for SuperTraits<'_> {
type Item = TraitRef;
fn next(&mut self) -> Option<Self::Item> {
@@ -417,7 +418,7 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
) -> Result<Const, Self::Error> {
if let chalk_ir::ConstValue::Concrete(c) = &constant.data(Interner).value {
if let ConstScalar::UnevaluatedConst(id, subst) = &c.interned {
- if let Ok(eval) = self.db.const_eval(*id, subst.clone()) {
+ if let Ok(eval) = self.db.const_eval(*id, subst.clone(), None) {
return Ok(eval);
} else {
return Ok(unknown_const(constant.data(Interner).ty.clone()));
@@ -431,10 +432,11 @@ impl FallibleTypeFolder<Interner> for UnevaluatedConstEvaluatorFolder<'_> {
pub(crate) fn detect_variant_from_bytes<'a>(
layout: &'a Layout,
db: &dyn HirDatabase,
- krate: CrateId,
+ trait_env: Arc<TraitEnvironment>,
b: &[u8],
e: EnumId,
) -> Option<(LocalEnumVariantId, &'a Layout)> {
+ let krate = trait_env.krate;
let (var_id, var_layout) = match &layout.variants {
hir_def::layout::Variants::Single { index } => (index.0, &*layout),
hir_def::layout::Variants::Multiple { tag, tag_encoding, variants, .. } => {
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
index a20aff93f..f860ee948 100644
--- a/src/tools/rust-analyzer/crates/hir/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -18,7 +18,7 @@ arrayvec = "0.7.2"
itertools = "0.10.5"
smallvec.workspace = true
triomphe.workspace = true
-once_cell = "1.17.0"
+once_cell = "1.17.1"
# local deps
base-db.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
index b81793729..0f2fb2c81 100644
--- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs
@@ -12,9 +12,9 @@ use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode};
use crate::{
- Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
- Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias, TypeParam, Union,
- Variant,
+ Adt, AssocItem, Const, ConstParam, Enum, ExternCrateDecl, Field, Function, GenericParam, Impl,
+ LifetimeParam, Macro, Module, ModuleDef, Static, Struct, Trait, TraitAlias, TypeAlias,
+ TypeParam, Union, Variant,
};
pub trait HasAttrs {
@@ -120,6 +120,39 @@ impl HasAttrs for AssocItem {
}
}
+impl HasAttrs for ExternCrateDecl {
+ fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
+ let def = AttrDefId::ExternCrateId(self.into());
+ db.attrs_with_owner(def)
+ }
+ fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
+ let crate_docs = self.resolved_crate(db)?.root_module().attrs(db).docs().map(String::from);
+ let def = AttrDefId::ExternCrateId(self.into());
+ let decl_docs = db.attrs(def).docs().map(String::from);
+ match (decl_docs, crate_docs) {
+ (None, None) => None,
+ (Some(decl_docs), None) => Some(decl_docs),
+ (None, Some(crate_docs)) => Some(crate_docs),
+ (Some(mut decl_docs), Some(crate_docs)) => {
+ decl_docs.push('\n');
+ decl_docs.push('\n');
+ decl_docs += &crate_docs;
+ Some(decl_docs)
+ }
+ }
+ .map(Documentation::new)
+ }
+ fn resolve_doc_path(
+ self,
+ db: &dyn HirDatabase,
+ link: &str,
+ ns: Option<Namespace>,
+ ) -> Option<ModuleDef> {
+ let def = AttrDefId::ExternCrateId(self.into());
+ resolve_doc_path(db, def, link, ns).map(ModuleDef::from)
+ }
+}
+
/// Resolves the item `link` points to in the scope of `def`.
fn resolve_doc_path(
db: &dyn HirDatabase,
@@ -140,7 +173,9 @@ fn resolve_doc_path(
AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
AttrDefId::ImplId(it) => it.resolver(db.upcast()),
AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
+ AttrDefId::UseId(it) => it.resolver(db.upcast()),
AttrDefId::MacroId(it) => it.resolver(db.upcast()),
+ AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()),
AttrDefId::GenericParamId(it) => match it {
GenericParamId::TypeParamId(it) => it.parent(),
GenericParamId::ConstParamId(it) => it.parent(),
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
index e0cde689f..936581bfe 100644
--- a/src/tools/rust-analyzer/crates/hir/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -5,13 +5,8 @@
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::*;
pub use hir_expand::db::{
- AstIdMapQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, HygieneFrameQuery,
- InternMacroCallQuery, MacroArgTextQuery, MacroDefQuery, MacroExpandQuery,
- ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
+ AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
+ ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
+ MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
};
pub use hir_ty::db::*;
-
-#[test]
-fn hir_database_is_object_safe() {
- fn _assert_object_safe(_: &dyn HirDatabase) {}
-}
diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
index b64d81490..80c3bcdca 100644
--- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs
@@ -3,7 +3,7 @@
//!
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
-pub use hir_ty::diagnostics::{IncoherentImpl, IncorrectCase};
+pub use hir_ty::diagnostics::{CaseType, IncoherentImpl, IncorrectCase};
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index 9a2090ab7..9dfb98e45 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -18,9 +18,9 @@ use hir_ty::{
};
use crate::{
- Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, Field, Function, GenericParam,
- HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct, Trait, TraitAlias,
- TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
+ Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field,
+ Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, Static, Struct,
+ Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
};
impl HirDisplay for Function {
@@ -238,6 +238,18 @@ impl HirDisplay for Type {
}
}
+impl HirDisplay for ExternCrateDecl {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
+ f.write_str("extern crate ")?;
+ write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
+ if let Some(alias) = self.alias(f.db) {
+ write!(f, " as {alias}",)?;
+ }
+ Ok(())
+ }
+}
+
impl HirDisplay for GenericParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self {
@@ -251,8 +263,8 @@ impl HirDisplay for GenericParam {
impl HirDisplay for TypeOrConstParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self.split(f.db) {
- either::Either::Left(x) => x.hir_fmt(f),
- either::Either::Right(x) => x.hir_fmt(f),
+ either::Either::Left(it) => it.hir_fmt(f),
+ either::Either::Right(it) => it.hir_fmt(f),
}
}
}
@@ -303,11 +315,11 @@ fn write_generic_params(
) -> Result<(), HirDisplayError> {
let params = f.db.generic_params(def);
if params.lifetimes.is_empty()
- && params.type_or_consts.iter().all(|x| x.1.const_param().is_none())
+ && params.type_or_consts.iter().all(|it| it.1.const_param().is_none())
&& params
.type_or_consts
.iter()
- .filter_map(|x| x.1.type_param())
+ .filter_map(|it| it.1.type_param())
.all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList))
{
return Ok(());
diff --git a/src/tools/rust-analyzer/crates/hir/src/from_id.rs b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
index de2390219..fc4bbffdb 100644
--- a/src/tools/rust-analyzer/crates/hir/src/from_id.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/from_id.rs
@@ -15,7 +15,7 @@ use crate::{
};
macro_rules! from_id {
- ($(($id:path, $ty:path)),*) => {$(
+ ($(($id:path, $ty:path)),* $(,)?) => {$(
impl From<$id> for $ty {
fn from(id: $id) -> $ty {
$ty { id }
@@ -47,7 +47,8 @@ from_id![
(hir_def::TypeParamId, crate::TypeParam),
(hir_def::ConstParamId, crate::ConstParam),
(hir_def::LifetimeParamId, crate::LifetimeParam),
- (hir_def::MacroId, crate::Macro)
+ (hir_def::MacroId, crate::Macro),
+ (hir_def::ExternCrateId, crate::ExternCrateDecl),
];
impl From<AdtId> for Adt {
diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
index 9f6b5c0a9..31cf8ba33 100644
--- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
@@ -1,18 +1,19 @@
//! Provides a set of implementations for hir's objects that allows getting back the location in a file.
+use base_db::FileId;
use either::Either;
use hir_def::{
nameres::{ModuleOrigin, ModuleSource},
src::{HasChildSource, HasSource as _},
Lookup, MacroId, VariantId,
};
-use hir_expand::InFile;
+use hir_expand::{HirFileId, InFile};
use syntax::ast;
use crate::{
- db::HirDatabase, Adt, Const, Enum, Field, FieldSource, Function, Impl, LifetimeParam,
- LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam,
- Union, Variant,
+ db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
+ LifetimeParam, LocalSource, Macro, Module, Static, Struct, Trait, TraitAlias, TypeAlias,
+ TypeOrConstParam, Union, Variant,
};
pub trait HasSource {
@@ -20,6 +21,10 @@ pub trait HasSource {
/// Fetches the definition's source node.
/// Using [`crate::Semantics::source`] is preferred when working with [`crate::Semantics`],
/// as that caches the parsed file in the semantics' cache.
+ ///
+ /// Currently, some implementations can return `InFile` instead of `Option<InFile>`,
+ /// but this method returns an `Option` to leave room for supporting rlibs in the future;
+ /// see https://github.com/rust-lang/rust-analyzer/issues/6913.
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
}
@@ -32,6 +37,11 @@ impl Module {
def_map[self.id.local_id].definition_source(db.upcast())
}
+ pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].definition_source_file_id()
+ }
+
pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool {
let def_map = self.id.def_map(db.upcast());
match def_map[self.id.local_id].origin {
@@ -40,6 +50,16 @@ impl Module {
}
}
+ pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<FileId> {
+ let def_map = self.id.def_map(db.upcast());
+ match def_map[self.id.local_id].origin {
+ ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition, .. } => {
+ Some(definition)
+ }
+ _ => None,
+ }
+ }
+
pub fn is_inline(self, db: &dyn HirDatabase) -> bool {
let def_map = self.id.def_map(db.upcast());
def_map[self.id.local_id].origin.is_inline()
@@ -187,3 +207,11 @@ impl HasSource for LocalSource {
Some(self.source)
}
}
+
+impl HasSource for ExternCrateDecl {
+ type Ast = ast::ExternCrate;
+
+ fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
+ Some(self.id.lookup(db.upcast()).source(db.upcast()))
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 6df625380..bf041b61f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -47,22 +47,22 @@ use hir_def::{
lang_item::LangItemTarget,
layout::{self, ReprOptions, TargetDataLayout},
macro_id_to_def_id,
- nameres::{self, diagnostics::DefDiagnostic, ModuleOrigin},
+ nameres::{self, diagnostics::DefDiagnostic},
+ path::ImportAlias,
per_ns::PerNs,
resolver::{HasResolver, Resolver},
src::HasSource as _,
- AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
- EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, InTypeConstId, ItemContainerId,
- LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId,
- StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId,
- UnionId,
+ AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
+ EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, HasModule, ImplId,
+ InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup,
+ MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId,
+ TypeOrConstParamId, TypeParamId, UnionId,
};
use hir_expand::{name::name, MacroCallKind};
use hir_ty::{
all_super_traits, autoderef,
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
- display::HexifiedConst,
layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
mir::{self, interpret_mir},
@@ -89,11 +89,11 @@ use crate::db::{DefDatabase, HirDatabase};
pub use crate::{
attrs::{HasAttrs, Namespace},
diagnostics::{
- AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncoherentImpl,
- IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError, MacroExpansionParseError,
- MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms, MissingUnsafe,
- MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField,
- ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
+ AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode,
+ IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError,
+ MacroExpansionParseError, MalformedDerive, MismatchedArgCount, MissingFields,
+ MissingMatchArms, MissingUnsafe, MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem,
+ PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
UnresolvedProcMacro, UnusedMut,
@@ -201,9 +201,8 @@ impl Crate {
db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
}
- pub fn root_module(self, db: &dyn HirDatabase) -> Module {
- let def_map = db.crate_def_map(self.id);
- Module { id: def_map.crate_root().into() }
+ pub fn root_module(self) -> Module {
+ Module { id: CrateRootModuleId::from(self.id).into() }
}
pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
@@ -248,7 +247,7 @@ impl Crate {
/// Try to get the root URL of the documentation of a crate.
pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
// Look for #![doc(html_root_url = "...")]
- let attrs = db.attrs(AttrDefId::ModuleId(self.root_module(db).into()));
+ let attrs = db.attrs(AttrDefId::ModuleId(self.root_module().into()));
let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url");
doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
}
@@ -379,11 +378,6 @@ impl ModuleDef {
ModuleDef::BuiltinType(_) | ModuleDef::Macro(_) => return Vec::new(),
};
- let module = match self.module(db) {
- Some(it) => it,
- None => return Vec::new(),
- };
-
let mut acc = Vec::new();
match self.as_def_with_body() {
@@ -391,7 +385,7 @@ impl ModuleDef {
def.diagnostics(db, &mut acc);
}
None => {
- for diag in hir_ty::diagnostics::incorrect_case(db, module.id.krate(), id) {
+ for diag in hir_ty::diagnostics::incorrect_case(db, id) {
acc.push(diag.into())
}
}
@@ -505,15 +499,10 @@ impl Module {
/// Finds nearest non-block ancestor `Module` (`self` included).
pub fn nearest_non_block_module(self, db: &dyn HirDatabase) -> Module {
let mut id = self.id;
- loop {
- let def_map = id.def_map(db.upcast());
- let origin = def_map[id.local_id].origin;
- if matches!(origin, ModuleOrigin::BlockExpr { .. }) {
- id = id.containing_module(db.upcast()).expect("block without parent module")
- } else {
- return Module { id };
- }
+ while id.is_block_module() {
+ id = id.containing_module(db.upcast()).expect("block without parent module");
}
+ Module { id }
}
pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
@@ -619,15 +608,21 @@ impl Module {
let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
for impl_def in self.impl_defs(db) {
+ let loc = impl_def.id.lookup(db.upcast());
+ let tree = loc.id.item_tree(db.upcast());
+ let node = &tree[loc.id.value];
+ let file_id = loc.id.file_id();
+ if file_id.is_builtin_derive(db.upcast()) {
+ // these expansions come from us, so diagnosing them is a waste of resources
+ // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
+ continue;
+ }
+
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
- let loc = impl_def.id.lookup(db.upcast());
- let tree = loc.id.item_tree(db.upcast());
- let node = &tree[loc.id.value];
- let file_id = loc.id.file_id();
let ast_id_map = db.ast_id_map(file_id);
acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into())
@@ -698,16 +693,18 @@ impl Module {
fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) {
let id = macro_id_to_def_id(db.upcast(), m.id);
- if let Err(e) = db.macro_def(id) {
- let Some(ast) = id.ast_id().left() else {
- never!("MacroDefError for proc-macro: {:?}", e);
+ if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) {
+ if let Some(e) = expander.mac.err() {
+ let Some(ast) = id.ast_id().left() else {
+ never!("declarative expander for non decl-macro: {:?}", e);
return;
};
- emit_def_diagnostic_(
- db,
- acc,
- &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
- );
+ emit_def_diagnostic_(
+ db,
+ acc,
+ &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
+ );
+ }
}
}
@@ -753,7 +750,7 @@ fn emit_def_diagnostic_(
let item = ast.to_node(db.upcast());
acc.push(
InactiveCode {
- node: ast.with_value(AstPtr::new(&item).into()),
+ node: ast.with_value(SyntaxNodePtr::new(&item).into()),
cfg: cfg.clone(),
opts: opts.clone(),
}
@@ -963,8 +960,15 @@ impl Field {
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
- db.layout_of_ty(self.ty(db).ty.clone(), self.parent.module(db).krate().into())
- .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).into()).unwrap()))
+ db.layout_of_ty(
+ self.ty(db).ty.clone(),
+ db.trait_environment(match hir_def::VariantId::from(self.parent) {
+ hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id),
+ hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
+ hir_def::VariantId::UnionId(id) => GenericDefId::AdtId(id.into()),
+ }),
+ )
+ .map(|layout| Layout(layout, db.target_data_layout(self.krate(db).into()).unwrap()))
}
pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
@@ -1234,7 +1238,7 @@ impl Adt {
pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
let subst = db.generic_defaults(self.into());
subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
- GenericArgData::Ty(x) => x.is_unknown(),
+ GenericArgData::Ty(it) => it.is_unknown(),
_ => false,
})
}
@@ -1244,8 +1248,12 @@ impl Adt {
return Err(LayoutError::HasPlaceholder);
}
let krate = self.krate(db).id;
- db.layout_of_adt(self.into(), Substitution::empty(Interner), krate)
- .map(|layout| Layout(layout, db.target_data_layout(krate).unwrap()))
+ db.layout_of_adt(
+ self.into(),
+ Substitution::empty(Interner),
+ db.trait_environment(self.into()),
+ )
+ .map(|layout| Layout(layout, db.target_data_layout(krate).unwrap()))
}
/// Turns this ADT into a type. Any type parameters of the ADT will be
@@ -1635,11 +1643,11 @@ impl DefWithBody {
for moof in &borrowck_result.moved_out_of_ref {
let span: InFile<SyntaxNodePtr> = match moof.span {
mir::MirSpan::ExprId(e) => match source_map.expr_syntax(e) {
- Ok(s) => s.map(|x| x.into()),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(p) {
- Ok(s) => s.map(|x| match x {
+ Ok(s) => s.map(|it| match it {
Either::Left(e) => e.into(),
Either::Right(e) => e.into(),
}),
@@ -1661,6 +1669,14 @@ impl DefWithBody {
let Some(&local) = mir_body.binding_locals.get(binding_id) else {
continue;
};
+ if body[binding_id]
+ .definitions
+ .iter()
+ .any(|&pat| source_map.pat_syntax(pat).is_err())
+ {
+ // Skip synthetic bindings
+ continue;
+ }
let need_mut = &mol[local];
let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) {
@@ -1670,11 +1686,11 @@ impl DefWithBody {
for span in spans {
let span: InFile<SyntaxNodePtr> = match span {
mir::MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {
- Ok(s) => s.map(|x| x.into()),
+ Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
- Ok(s) => s.map(|x| match x {
+ Ok(s) => s.map(|it| match it {
Either::Left(e) => e.into(),
Either::Right(e) => e.into(),
}),
@@ -1687,7 +1703,7 @@ impl DefWithBody {
}
(mir::MutabilityReason::Not, true) => {
if !infer.mutated_bindings_in_closure.contains(&binding_id) {
- let should_ignore = matches!(body[binding_id].name.as_str(), Some(x) if x.starts_with("_"));
+ let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
if !should_ignore {
acc.push(UnusedMut { local }.into())
}
@@ -1810,7 +1826,7 @@ impl DefWithBody {
// FIXME: don't ignore diagnostics for in type const
DefWithBody::InTypeConst(_) => return,
};
- for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
+ for diag in hir_ty::diagnostics::incorrect_case(db, def.into()) {
acc.push(diag.into())
}
}
@@ -1919,6 +1935,21 @@ impl Function {
db.function_data(self.id).has_async_kw()
}
+ /// Does this function have `#[test]` attribute?
+ pub fn is_test(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).attrs.is_test()
+ }
+
+ /// Does this function have the ignore attribute?
+ pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).attrs.is_ignore()
+ }
+
+ /// Does this function have `#[bench]` attribute?
+ pub fn is_bench(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).attrs.is_bench()
+ }
+
pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
hir_ty::is_fn_unsafe_to_call(db, self.id)
}
@@ -1962,7 +1993,7 @@ impl Function {
return r;
}
};
- let (result, stdout, stderr) = interpret_mir(db, &body, false);
+ let (result, stdout, stderr) = interpret_mir(db, body, false, None);
let mut text = match result {
Ok(_) => "pass".to_string(),
Err(e) => {
@@ -2098,6 +2129,47 @@ impl HasVisibility for Function {
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ExternCrateDecl {
+ pub(crate) id: ExternCrateId,
+}
+
+impl ExternCrateDecl {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.module(db.upcast()).into()
+ }
+
+ pub fn resolved_crate(self, db: &dyn HirDatabase) -> Option<Crate> {
+ db.extern_crate_decl_data(self.id).crate_id.map(Into::into)
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.extern_crate_decl_data(self.id).name.clone()
+ }
+
+ pub fn alias(self, db: &dyn HirDatabase) -> Option<ImportAlias> {
+ db.extern_crate_decl_data(self.id).alias.clone()
+ }
+
+ /// Returns the name under which this crate is made accessible, taking `_` into account.
+ pub fn alias_or_name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let extern_crate_decl_data = db.extern_crate_decl_data(self.id);
+ match &extern_crate_decl_data.alias {
+ Some(ImportAlias::Underscore) => None,
+ Some(ImportAlias::Alias(alias)) => Some(alias.clone()),
+ None => Some(extern_crate_decl_data.name.clone()),
+ }
+ }
+}
+
+impl HasVisibility for ExternCrateDecl {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ db.extern_crate_decl_data(self.id)
+ .visibility
+ .resolve(db.upcast(), &self.id.resolver(db.upcast()))
+ }
+}
+
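+
+For reference, a small illustration of what `alias_or_name` yields for the three shapes an `extern crate` item can take; the fixtures are placeholders and the comments restate the match above.
+
+```rust
+// Illustrative only; comments state what `alias_or_name` returns for each shape.
+extern crate core;            // -> Some("core"): no alias, the crate's own name is used
+extern crate core as my_core; // -> Some("my_core"): the alias wins over the crate name
+extern crate core as _;       // -> None: `_` links the crate without making it nameable
+```
+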
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct InTypeConst {
pub(crate) id: InTypeConstId,
}
@@ -2131,8 +2203,28 @@ impl Const {
}
pub fn render_eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
- let c = db.const_eval(self.id.into(), Substitution::empty(Interner))?;
- let r = format!("{}", HexifiedConst(c).display(db));
+ let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
+ let data = &c.data(Interner);
+ if let TyKind::Scalar(s) = data.ty.kind(Interner) {
+ if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
+ if let hir_ty::ConstValue::Concrete(c) = &data.value {
+ if let hir_ty::ConstScalar::Bytes(b, _) = &c.interned {
+ let value = u128::from_le_bytes(mir::pad16(b, false));
+ let value_signed =
+ i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
+ if value >= 10 {
+ return Ok(format!("{} ({:#X})", value_signed, value));
+ } else {
+ return Ok(format!("{}", value_signed));
+ }
+ }
+ }
+ }
+ }
+ if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) {
+ return Ok(s);
+ }
+ let r = format!("{}", c.display(db));
return Ok(r);
}
}
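
`render_eval` now special-cases scalar integers before falling back to the `Debug`-based renderer: the evaluated bytes are printed in decimal (sign-extended for signed scalars), with the zero-extended value appended in hex once it is 10 or more. A self-contained sketch of that rule follows; `pad16` here is a hypothetical stand-in for the helper of the same name in `mir`, not the real implementation.

```rust
// Sketch of the integer-rendering rule above (assumed semantics, not rust-analyzer API).
fn pad16(b: &[u8], is_signed: bool) -> [u8; 16] {
    // Little-endian bytes, zero- or sign-extended to 128 bits.
    let fill = if is_signed && b.last().map_or(false, |&msb| msb & 0x80 != 0) { 0xFF } else { 0x00 };
    let mut out = [fill; 16];
    out[..b.len()].copy_from_slice(b);
    out
}

fn render_int(b: &[u8], is_signed: bool) -> String {
    let value = u128::from_le_bytes(pad16(b, false));
    let value_signed = i128::from_le_bytes(pad16(b, is_signed));
    if value >= 10 {
        format!("{value_signed} ({value:#X})")
    } else {
        format!("{value_signed}")
    }
}

fn main() {
    assert_eq!(render_int(&3i8.to_le_bytes(), true), "3");
    assert_eq!(render_int(&255u8.to_le_bytes(), false), "255 (0xFF)");
    assert_eq!(render_int(&(-1i8).to_le_bytes(), true), "-1 (0xFF)");
}
```
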
@@ -2270,7 +2362,7 @@ impl TypeAlias {
pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
let subst = db.generic_defaults(self.id.into());
subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
- GenericArgData::Ty(x) => x.is_unknown(),
+ GenericArgData::Ty(it) => it.is_unknown(),
_ => false,
})
}
@@ -2660,8 +2752,8 @@ impl GenericDef {
let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
match toc.split(db) {
- Either::Left(x) => GenericParam::ConstParam(x),
- Either::Right(x) => GenericParam::TypeParam(x),
+ Either::Left(it) => GenericParam::ConstParam(it),
+ Either::Right(it) => GenericParam::TypeParam(it),
}
});
self.lifetime_params(db)
@@ -2709,14 +2801,14 @@ pub struct LocalSource {
impl LocalSource {
pub fn as_ident_pat(&self) -> Option<&ast::IdentPat> {
match &self.source.value {
- Either::Left(x) => Some(x),
+ Either::Left(it) => Some(it),
Either::Right(_) => None,
}
}
pub fn into_ident_pat(self) -> Option<ast::IdentPat> {
match self.source.value {
- Either::Left(x) => Some(x),
+ Either::Left(it) => Some(it),
Either::Right(_) => None,
}
}
@@ -2738,7 +2830,7 @@ impl LocalSource {
}
pub fn syntax_ptr(self) -> InFile<SyntaxNodePtr> {
- self.source.map(|x| SyntaxNodePtr::new(x.syntax()))
+ self.source.map(|it| SyntaxNodePtr::new(it.syntax()))
}
}
@@ -2797,13 +2889,13 @@ impl Local {
Type::new(db, def, ty)
}
- /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = x;`
+ /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = it;`
pub fn sources(self, db: &dyn HirDatabase) -> Vec<LocalSource> {
let (body, source_map) = db.body_with_source_map(self.parent);
self.sources_(db, &body, &source_map).collect()
}
- /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = x;`
+ /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = it;`
pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource {
let (body, source_map) = db.body_with_source_map(self.parent);
let src = self.sources_(db, &body, &source_map).next().unwrap();
@@ -3057,7 +3149,9 @@ impl TypeParam {
let subst = TyBuilder::placeholder_subst(db, self.id.parent());
let ty = ty.substitute(Interner, &subst);
match ty.data(Interner) {
- GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
+ GenericArgData::Ty(it) => {
+ Some(Type::new_with_resolver_inner(db, &resolver, it.clone()))
+ }
_ => None,
}
}
@@ -3096,7 +3190,7 @@ impl ConstParam {
pub fn name(self, db: &dyn HirDatabase) -> Name {
let params = db.generic_params(self.id.parent());
match params.type_or_consts[self.id.local_id()].name() {
- Some(x) => x.clone(),
+ Some(it) => it.clone(),
None => {
never!();
Name::missing()
@@ -3153,8 +3247,8 @@ impl TypeOrConstParam {
pub fn ty(self, db: &dyn HirDatabase) -> Type {
match self.split(db) {
- Either::Left(x) => x.ty(db),
- Either::Right(x) => x.ty(db),
+ Either::Left(it) => it.ty(db),
+ Either::Right(it) => it.ty(db),
}
}
}
@@ -3260,9 +3354,9 @@ impl Impl {
self.id.lookup(db.upcast()).container.into()
}
- pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+ pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
let src = self.source(db)?;
- src.file_id.is_builtin_derive(db.upcast())
+ src.file_id.as_builtin_derive_attr_node(db.upcast())
}
}
@@ -3652,9 +3746,9 @@ impl Type {
};
let parent_subst = TyBuilder::subst_for_def(db, trait_id, None)
.push(self.ty.clone())
- .fill(|x| {
+ .fill(|it| {
// FIXME: this code is not covered in tests.
- match x {
+ match it {
ParamKind::Type => {
GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
}
@@ -3821,7 +3915,7 @@ impl Type {
pub fn as_array(&self, db: &dyn HirDatabase) -> Option<(Type, usize)> {
if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
- try_const_usize(db, len).map(|x| (self.derived(ty.clone()), x as usize))
+ try_const_usize(db, len).map(|it| (self.derived(ty.clone()), it as usize))
} else {
None
}
@@ -4275,7 +4369,7 @@ impl Type {
}
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
- db.layout_of_ty(self.ty.clone(), self.env.krate)
+ db.layout_of_ty(self.ty.clone(), self.env.clone())
.map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap()))
}
}
@@ -4662,6 +4756,12 @@ pub trait HasContainer {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer;
}
+impl HasContainer for ExternCrateDecl {
+ fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
+ container_id_to_hir(self.id.lookup(db.upcast()).container.into())
+ }
+}
+
impl HasContainer for Module {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
// FIXME: handle block expressions as modules (their parent is in a different DefMap)
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 5a76a9185..e99d2984c 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -15,11 +15,7 @@ use hir_def::{
type_ref::Mutability,
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
};
-use hir_expand::{
- db::ExpandDatabase,
- name::{known, AsName},
- ExpansionInfo, MacroCallId,
-};
+use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
@@ -439,10 +435,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_path(path)
}
- pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
- self.imp.resolve_extern_crate(extern_crate)
- }
-
pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
self.imp.resolve_variant(record_lit).map(VariantDef::from)
}
@@ -1242,18 +1234,6 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(path.syntax())?.resolve_path(self.db, path)
}
- fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
- let krate = self.scope(extern_crate.syntax())?.krate();
- let name = extern_crate.name_ref()?.as_name();
- if name == known::SELF_PARAM {
- return Some(krate);
- }
- krate
- .dependencies(self.db)
- .into_iter()
- .find_map(|dep| (dep.name == name).then_some(dep.krate))
- }
-
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
}
@@ -1494,7 +1474,11 @@ impl<'db> SemanticsImpl<'db> {
}
fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
- let Some(enclosing_item) = expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast) else { return false };
+ let Some(enclosing_item) =
+ expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast)
+ else {
+ return false;
+ };
let def = match &enclosing_item {
Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
@@ -1599,6 +1583,7 @@ to_def_impls![
(crate::Local, ast::SelfParam, self_param_to_def),
(crate::Label, ast::Label, label_to_def),
(crate::Adt, ast::Adt, adt_to_def),
+ (crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
];
fn find_root(node: &SyntaxNode) -> SyntaxNode {
@@ -1631,7 +1616,7 @@ pub struct SemanticsScope<'a> {
resolver: Resolver,
}
-impl<'a> SemanticsScope<'a> {
+impl SemanticsScope<'_> {
pub fn module(&self) -> Module {
Module { id: self.resolver.module() }
}
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index c50ffa4f8..aabda3655 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -93,9 +93,9 @@ use hir_def::{
DynMap,
},
hir::{BindingId, LabelId},
- AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FieldId, FunctionId,
- GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
- TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
+ AdtId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FieldId,
+ FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
+ StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
use rustc_hash::FxHashMap;
@@ -203,6 +203,16 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<EnumVariantId> {
self.to_def(src, keys::VARIANT)
}
+ pub(super) fn extern_crate_to_def(
+ &mut self,
+ src: InFile<ast::ExternCrate>,
+ ) -> Option<ExternCrateId> {
+ self.to_def(src, keys::EXTERN_CRATE)
+ }
+ #[allow(dead_code)]
+ pub(super) fn use_to_def(&mut self, src: InFile<ast::Use>) -> Option<UseId> {
+ self.to_def(src, keys::USE)
+ }
pub(super) fn adt_to_def(
&mut self,
InFile { file_id, value }: InFile<ast::Adt>,
@@ -298,7 +308,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|x| TypeParamId::from_unchecked(x))
+ dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|it| TypeParamId::from_unchecked(it))
}
pub(super) fn lifetime_param_to_def(
@@ -316,7 +326,10 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<ConstParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(|x| ConstParamId::from_unchecked(x))
+ dyn_map[keys::CONST_PARAM]
+ .get(&src.value)
+ .copied()
+ .map(|it| ConstParamId::from_unchecked(it))
}
pub(super) fn generic_param_to_def(
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index ecb1b306a..3499daf11 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -832,7 +832,7 @@ impl SourceAnalyzer {
None => return func,
};
let env = db.trait_environment_for_body(owner);
- method_resolution::lookup_impl_method(db, env, func, substs).0
+ db.lookup_impl_method(env, func, substs).0
}
fn resolve_impl_const_or_trait_def(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index d07c63726..6aca716bb 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -3,7 +3,10 @@ use syntax::ast::{self, make, AstNode};
use crate::{
assist_context::{AssistContext, Assists},
- utils::{add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, DefaultMethods},
+ utils::{
+ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, DefaultMethods,
+ IgnoreAssocItems,
+ },
AssistId, AssistKind,
};
@@ -43,6 +46,7 @@ pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext<'_
acc,
ctx,
DefaultMethods::No,
+ IgnoreAssocItems::DocHiddenAttrPresent,
"add_impl_missing_members",
"Implement missing members",
)
@@ -87,6 +91,7 @@ pub(crate) fn add_missing_default_members(
acc,
ctx,
DefaultMethods::Only,
+ IgnoreAssocItems::DocHiddenAttrPresent,
"add_impl_default_members",
"Implement default members",
)
@@ -96,6 +101,7 @@ fn add_missing_impl_members_inner(
acc: &mut Assists,
ctx: &AssistContext<'_>,
mode: DefaultMethods,
+ ignore_items: IgnoreAssocItems,
assist_id: &'static str,
label: &'static str,
) -> Option<()> {
@@ -115,10 +121,21 @@ fn add_missing_impl_members_inner(
let trait_ref = impl_.trait_ref(ctx.db())?;
let trait_ = trait_ref.trait_();
+ let mut ign_item = ignore_items;
+
+ if let IgnoreAssocItems::DocHiddenAttrPresent = ignore_items {
+ // Relax condition for local crates.
+ let db = ctx.db();
+ if trait_.module(db).krate().origin(db).is_local() {
+ ign_item = IgnoreAssocItems::No;
+ }
+ }
+
let missing_items = filter_assoc_items(
&ctx.sema,
&ide_db::traits::get_missing_assoc_items(&ctx.sema, &impl_def),
mode,
+ ign_item,
);
if missing_items.is_empty() {
@@ -1966,4 +1983,169 @@ impl AnotherTrait<i32> for () {
"#,
);
}
+
+ #[test]
+ fn doc_hidden_default_impls_ignored() {
+ // The doc(hidden) attr is ignored when the trait and the impl both belong to the local crate.
+ check_assist(
+ add_missing_default_members,
+ r#"
+struct Foo;
+trait Trait {
+ #[doc(hidden)]
+ fn func_with_default_impl() -> u32 {
+ 42
+ }
+ fn another_default_impl() -> u32 {
+ 43
+ }
+}
+impl Tra$0it for Foo {}"#,
+ r#"
+struct Foo;
+trait Trait {
+ #[doc(hidden)]
+ fn func_with_default_impl() -> u32 {
+ 42
+ }
+ fn another_default_impl() -> u32 {
+ 43
+ }
+}
+impl Trait for Foo {
+ $0fn func_with_default_impl() -> u32 {
+ 42
+ }
+
+ fn another_default_impl() -> u32 {
+ 43
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn doc_hidden_default_impls_lang_crates() {
+ // Not applicable because Eq has a single method and that method has a #[doc(hidden)] attr set.
+ check_assist_not_applicable(
+ add_missing_default_members,
+ r#"
+//- minicore: eq
+use core::cmp::Eq;
+struct Foo;
+impl E$0q for Foo { /* $0 */ }
+"#,
+ )
+ }
+
+ #[test]
+ fn doc_hidden_default_impls_lib_crates() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+ //- /main.rs crate:a deps:b
+ struct B;
+ impl b::Exte$0rnTrait for B {}
+ //- /lib.rs crate:b new_source_root:library
+ pub trait ExternTrait {
+ #[doc(hidden)]
+ fn hidden_default() -> Option<()> {
+ todo!()
+ }
+
+ fn unhidden_default() -> Option<()> {
+ todo!()
+ }
+
+ fn unhidden_nondefault() -> Option<()>;
+ }
+ "#,
+ r#"
+ struct B;
+ impl b::ExternTrait for B {
+ $0fn unhidden_default() -> Option<()> {
+ todo!()
+ }
+ }
+ "#,
+ )
+ }
+
+ #[test]
+ fn doc_hidden_default_impls_local_crates() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+trait LocalTrait {
+ #[doc(hidden)]
+ fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+
+struct B;
+impl Loc$0alTrait for B {}
+ "#,
+ r#"
+trait LocalTrait {
+ #[doc(hidden)]
+ fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+
+struct B;
+impl LocalTrait for B {
+ $0fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn doc_hidden_default_impls_workspace_crates() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+//- /lib.rs crate:b new_source_root:local
+trait LocalTrait {
+ #[doc(hidden)]
+ fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+
+//- /main.rs crate:a deps:b
+struct B;
+impl b::Loc$0alTrait for B {}
+ "#,
+ r#"
+struct B;
+impl b::LocalTrait for B {
+ $0fn no_skip_default() -> Option<()> {
+ todo!()
+ }
+
+ fn no_skip_default_2() -> Option<()> {
+ todo!()
+ }
+}
+ "#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 7384390f2..3b162d7c4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -8,10 +8,7 @@ use itertools::Itertools;
use syntax::ast::edit_in_place::Removable;
use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat};
-use crate::{
- utils::{self, render_snippet, Cursor},
- AssistContext, AssistId, AssistKind, Assists,
-};
+use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
// Assist: add_missing_match_arms
//
@@ -40,9 +37,9 @@ use crate::{
pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let match_expr = ctx.find_node_at_offset_with_descend::<ast::MatchExpr>()?;
let match_arm_list = match_expr.match_arm_list()?;
- let target_range = ctx.sema.original_range(match_expr.syntax()).range;
+ let arm_list_range = ctx.sema.original_range_opt(match_arm_list.syntax())?;
- if let None = cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list) {
+ if cursor_at_trivial_match_arm_list(ctx, &match_expr, &match_arm_list).is_none() {
let arm_list_range = ctx.sema.original_range(match_arm_list.syntax()).range;
let cursor_in_range = arm_list_range.contains_range(ctx.selection_trimmed());
if cursor_in_range {
@@ -75,14 +72,18 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.collect();
let module = ctx.sema.scope(expr.syntax())?.module();
- let (mut missing_pats, is_non_exhaustive): (
+ let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,
bool,
+ bool,
) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) {
let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
let variants = enum_def.variants(ctx.db());
+ let has_hidden_variants =
+ variants.iter().any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+
let missing_pats = variants
.into_iter()
.filter_map(|variant| {
@@ -101,7 +102,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
} else {
Box::new(missing_pats)
};
- (missing_pats.peekable(), is_non_exhaustive)
+ (missing_pats.peekable(), is_non_exhaustive, has_hidden_variants)
} else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) {
let is_non_exhaustive =
enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db(), module.krate()));
@@ -124,6 +125,12 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
if n_arms > 256 {
return None;
}
+
+ let has_hidden_variants = variants_of_enums
+ .iter()
+ .flatten()
+ .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+
let missing_pats = variants_of_enums
.into_iter()
.multi_cartesian_product()
@@ -139,7 +146,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
- ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive)
+ (
+ (Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(),
+ is_non_exhaustive,
+ has_hidden_variants,
+ )
} else if let Some((enum_def, len)) = resolve_array_of_enum_def(&ctx.sema, &expr) {
let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
let variants = enum_def.variants(ctx.db());
@@ -148,6 +159,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
return None;
}
+ let has_hidden_variants =
+ variants.iter().any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+
let variants_of_enums = vec![variants; len];
let missing_pats = variants_of_enums
@@ -164,28 +178,42 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
(ast::Pat::from(make::slice_pat(patterns)), is_hidden)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
- ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive)
+ (
+ (Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(),
+ is_non_exhaustive,
+ has_hidden_variants,
+ )
} else {
return None;
};
let mut needs_catch_all_arm = is_non_exhaustive && !has_catch_all_arm;
- if !needs_catch_all_arm && missing_pats.peek().is_none() {
+ if !needs_catch_all_arm
+ && ((has_hidden_variants && has_catch_all_arm) || missing_pats.peek().is_none())
+ {
return None;
}
acc.add(
AssistId("add_missing_match_arms", AssistKind::QuickFix),
"Fill match arms",
- target_range,
- |builder| {
+ ctx.sema.original_range(match_expr.syntax()).range,
+ |edit| {
let new_match_arm_list = match_arm_list.clone_for_update();
+
+ // having any hidden variants means that we need a catch-all arm
+ needs_catch_all_arm |= has_hidden_variants;
+
let missing_arms = missing_pats
- .map(|(pat, hidden)| {
- (make::match_arm(iter::once(pat), None, make::ext::expr_todo()), hidden)
+ .filter(|(_, hidden)| {
+ // filter out hidden patterns because they're handled by the catch-all arm
+ !hidden
})
- .map(|(it, hidden)| (it.clone_for_update(), hidden));
+ .map(|(pat, _)| {
+ make::match_arm(iter::once(pat), None, make::ext::expr_todo())
+ .clone_for_update()
+ });
let catch_all_arm = new_match_arm_list
.arms()
@@ -204,15 +232,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
cov_mark::hit!(add_missing_match_arms_empty_expr);
}
}
+
let mut first_new_arm = None;
- for (arm, hidden) in missing_arms {
- if hidden {
- needs_catch_all_arm = !has_catch_all_arm;
- } else {
- first_new_arm.get_or_insert_with(|| arm.clone());
- new_match_arm_list.add_arm(arm);
- }
+ for arm in missing_arms {
+ first_new_arm.get_or_insert_with(|| arm.clone());
+ new_match_arm_list.add_arm(arm);
}
+
if needs_catch_all_arm && !has_catch_all_arm {
cov_mark::hit!(added_wildcard_pattern);
let arm = make::match_arm(
@@ -225,24 +251,38 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
new_match_arm_list.add_arm(arm);
}
- let old_range = ctx.sema.original_range(match_arm_list.syntax()).range;
- match (first_new_arm, ctx.config.snippet_cap) {
- (Some(first_new_arm), Some(cap)) => {
- let extend_lifetime;
- let cursor =
- match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast)
- {
- Some(it) => {
- extend_lifetime = it.syntax().clone();
- Cursor::Replace(&extend_lifetime)
- }
- None => Cursor::Before(first_new_arm.syntax()),
- };
- let snippet = render_snippet(cap, new_match_arm_list.syntax(), cursor);
- builder.replace_snippet(cap, old_range, snippet);
+ if let (Some(first_new_arm), Some(cap)) = (first_new_arm, ctx.config.snippet_cap) {
+ match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast) {
+ Some(it) => edit.add_placeholder_snippet(cap, it),
+ None => edit.add_tabstop_before(cap, first_new_arm),
}
- _ => builder.replace(old_range, new_match_arm_list.to_string()),
}
+
+ // FIXME: Hack for mutable syntax trees not having great support for macros
+ // Just replace the element that the original range came from
+ let old_place = {
+ // Find the original element
+ let file = ctx.sema.parse(arm_list_range.file_id);
+ let old_place = file.syntax().covering_element(arm_list_range.range);
+
+ // Make `old_place` mut
+ match old_place {
+ syntax::SyntaxElement::Node(it) => {
+ syntax::SyntaxElement::from(edit.make_syntax_mut(it))
+ }
+ syntax::SyntaxElement::Token(it) => {
+ // Don't have a way to make tokens mut, so instead make the parent mut
+ // and find the token again
+ let parent = edit.make_syntax_mut(it.parent().unwrap());
+ let mut_token =
+ parent.covering_element(it.text_range()).into_token().unwrap();
+
+ syntax::SyntaxElement::from(mut_token)
+ }
+ }
+ };
+
+ syntax::ted::replace(old_place, new_match_arm_list.syntax());
},
)
}
@@ -1621,10 +1661,9 @@ pub enum E { #[doc(hidden)] A, }
);
}
- // FIXME: I don't think the assist should be applicable in this case
#[test]
fn does_not_fill_wildcard_with_wildcard() {
- check_assist(
+ check_assist_not_applicable(
add_missing_match_arms,
r#"
//- /main.rs crate:main deps:e
@@ -1636,13 +1675,6 @@ fn foo(t: ::e::E) {
//- /e.rs crate:e
pub enum E { #[doc(hidden)] A, }
"#,
- r#"
-fn foo(t: ::e::E) {
- match t {
- _ => todo!(),
- }
-}
-"#,
);
}
@@ -1777,7 +1809,7 @@ fn foo(t: ::e::E, b: bool) {
#[test]
fn does_not_fill_wildcard_with_partial_wildcard_and_wildcard() {
- check_assist(
+ check_assist_not_applicable(
add_missing_match_arms,
r#"
//- /main.rs crate:main deps:e
@@ -1789,14 +1821,6 @@ fn foo(t: ::e::E, b: bool) {
}
//- /e.rs crate:e
pub enum E { #[doc(hidden)] A, }"#,
- r#"
-fn foo(t: ::e::E, b: bool) {
- match t {
- _ if b => todo!(),
- _ => todo!(),
- }
-}
-"#,
);
}
@@ -1897,4 +1921,24 @@ fn foo(t: E) {
}"#,
);
}
+
+ #[test]
+ fn not_applicable_when_match_arm_list_cannot_be_upmapped() {
+ check_assist_not_applicable(
+ add_missing_match_arms,
+ r#"
+macro_rules! foo {
+ ($($t:tt)*) => {
+ $($t)* {}
+ }
+}
+
+enum E { A }
+
+fn main() {
+ foo!(match E::A$0);
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
index acf82e4b2..36f68d176 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -42,7 +42,9 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let name_ref = ast::NameRef::cast(ident.parent()?)?;
let def = match NameRefClass::classify(&ctx.sema, &name_ref)? {
NameRefClass::Definition(def) => def,
- NameRefClass::FieldShorthand { .. } => return None,
+ NameRefClass::FieldShorthand { .. } | NameRefClass::ExternCrateShorthand { .. } => {
+ return None
+ }
};
let fun = match def {
Definition::Function(it) => it,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
index 2b1d8f6f0..e6179ab8b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/change_visibility.rs
@@ -2,9 +2,10 @@ use syntax::{
ast::{self, HasName, HasVisibility},
AstNode,
SyntaxKind::{
- CONST, ENUM, FN, MACRO_DEF, MODULE, STATIC, STRUCT, TRAIT, TYPE_ALIAS, USE, VISIBILITY,
+ self, ASSOC_ITEM_LIST, CONST, ENUM, FN, MACRO_DEF, MODULE, SOURCE_FILE, STATIC, STRUCT,
+ TRAIT, TYPE_ALIAS, USE, VISIBILITY,
},
- T,
+ SyntaxNode, T,
};
use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
@@ -46,13 +47,11 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (offset, target) = if let Some(keyword) = item_keyword {
let parent = keyword.parent()?;
- let def_kws =
- vec![CONST, STATIC, TYPE_ALIAS, FN, MODULE, STRUCT, ENUM, TRAIT, USE, MACRO_DEF];
- // Parent is not a definition, can't add visibility
- if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
+
+ if !can_add(&parent) {
return None;
}
- // Already have visibility, do nothing
+ // Already has visibility, do nothing
if parent.children().any(|child| child.kind() == VISIBILITY) {
return None;
}
@@ -86,6 +85,29 @@ fn add_vis(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
)
}
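+/// Returns whether a visibility modifier can be added to `node`: it must be one of the
+/// definition kinds listed in `LEGAL`, and it must sit directly in a source file, a module,
+/// or the item list of an inherent impl.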
+fn can_add(node: &SyntaxNode) -> bool {
+ const LEGAL: &[SyntaxKind] =
+ &[CONST, STATIC, TYPE_ALIAS, FN, MODULE, STRUCT, ENUM, TRAIT, USE, MACRO_DEF];
+
+ LEGAL.contains(&node.kind()) && {
+ let Some(p) = node.parent() else {
+ return false;
+ };
+
+ if p.kind() == ASSOC_ITEM_LIST {
+ p.parent()
+ .and_then(|it| ast::Impl::cast(it))
+ // Inherent impls (i.e. non-trait impls) have a non-local
+ // effect, so their items can have visibility even when
+ // nested; filter out trait impls.
+ .filter(|imp| imp.for_token().is_none())
+ .is_some()
+ } else {
+ matches!(p.kind(), SOURCE_FILE | MODULE)
+ }
+ }
+}
+
fn change_vis(acc: &mut Assists, vis: ast::Visibility) -> Option<()> {
if vis.syntax().text() == "pub" {
let target = vis.syntax().text_range();
@@ -129,6 +151,16 @@ mod tests {
check_assist(change_visibility, "unsafe f$0n foo() {}", "pub(crate) unsafe fn foo() {}");
check_assist(change_visibility, "$0macro foo() {}", "pub(crate) macro foo() {}");
check_assist(change_visibility, "$0use foo;", "pub(crate) use foo;");
+ check_assist(
+ change_visibility,
+ "impl Foo { f$0n foo() {} }",
+ "impl Foo { pub(crate) fn foo() {} }",
+ );
+ check_assist(
+ change_visibility,
+ "fn bar() { impl Foo { f$0n foo() {} } }",
+ "fn bar() { impl Foo { pub(crate) fn foo() {} } }",
+ );
}
#[test]
@@ -213,4 +245,33 @@ mod tests {
check_assist_target(change_visibility, "pub(crate)$0 fn foo() {}", "pub(crate)");
check_assist_target(change_visibility, "struct S { $0field: u32 }", "field");
}
+
+ #[test]
+ fn not_applicable_for_items_within_traits() {
+ check_assist_not_applicable(change_visibility, "trait Foo { f$0n run() {} }");
+ check_assist_not_applicable(change_visibility, "trait Foo { con$0st FOO: u8 = 69; }");
+ check_assist_not_applicable(change_visibility, "impl Foo for Bar { f$0n quox() {} }");
+ }
+
+ #[test]
+ fn not_applicable_for_items_within_fns() {
+ check_assist_not_applicable(change_visibility, "fn foo() { f$0n inner() {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { unsafe f$0n inner() {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { const f$0n inner() {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { con$0st FOO: u8 = 69; }");
+ check_assist_not_applicable(change_visibility, "fn foo() { en$0um Foo {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { stru$0ct Foo {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { mo$0d foo {} }");
+ check_assist_not_applicable(change_visibility, "fn foo() { $0use foo; }");
+ check_assist_not_applicable(change_visibility, "fn foo() { $0type Foo = Bar<T>; }");
+ check_assist_not_applicable(change_visibility, "fn foo() { tr$0ait Foo {} }");
+ check_assist_not_applicable(
+ change_visibility,
+ "fn foo() { impl Trait for Bar { f$0n bar() {} } }",
+ );
+ check_assist_not_applicable(
+ change_visibility,
+ "fn foo() { impl Trait for Bar { con$0st FOO: u8 = 69; } }",
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
index b1b0f587c..6a5b11f54 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs
@@ -1,3 +1,6 @@
+use hir::Semantics;
+use ide_db::RootDatabase;
+use stdx::format_to;
use syntax::ast::{self, AstNode};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -24,6 +27,7 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
+ use ArmBodyExpression::*;
let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?;
let match_arm_list = match_expr.match_arm_list()?;
let mut arms = match_arm_list.arms();
@@ -33,21 +37,20 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
cov_mark::hit!(non_two_arm_match);
return None;
}
- let first_arm_expr = first_arm.expr();
- let second_arm_expr = second_arm.expr();
+ let first_arm_expr = first_arm.expr()?;
+ let second_arm_expr = second_arm.expr()?;
+ let first_arm_body = is_bool_literal_expr(&ctx.sema, &first_arm_expr)?;
+ let second_arm_body = is_bool_literal_expr(&ctx.sema, &second_arm_expr)?;
- let invert_matches = if is_bool_literal_expr(&first_arm_expr, true)
- && is_bool_literal_expr(&second_arm_expr, false)
- {
- false
- } else if is_bool_literal_expr(&first_arm_expr, false)
- && is_bool_literal_expr(&second_arm_expr, true)
- {
- true
- } else {
+ if !matches!(
+ (&first_arm_body, &second_arm_body),
+ (Literal(true), Literal(false))
+ | (Literal(false), Literal(true))
+ | (Expression(_), Literal(false))
+ ) {
cov_mark::hit!(non_invert_bool_literal_arms);
return None;
- };
+ }
let target_range = ctx.sema.original_range(match_expr.syntax()).range;
let expr = match_expr.expr()?;
@@ -59,28 +62,55 @@ pub(crate) fn convert_two_arm_bool_match_to_matches_macro(
|builder| {
let mut arm_str = String::new();
if let Some(pat) = &first_arm.pat() {
- arm_str += &pat.to_string();
+ format_to!(arm_str, "{pat}");
}
if let Some(guard) = &first_arm.guard() {
arm_str += &format!(" {guard}");
}
- if invert_matches {
- builder.replace(target_range, format!("!matches!({expr}, {arm_str})"));
- } else {
- builder.replace(target_range, format!("matches!({expr}, {arm_str})"));
- }
+
+ let replace_with = match (first_arm_body, second_arm_body) {
+ (Literal(true), Literal(false)) => {
+ format!("matches!({expr}, {arm_str})")
+ }
+ (Literal(false), Literal(true)) => {
+ format!("!matches!({expr}, {arm_str})")
+ }
+ (Expression(body_expr), Literal(false)) => {
+ arm_str.push_str(match &first_arm.guard() {
+ Some(_) => " && ",
+ _ => " if ",
+ });
+ format!("matches!({expr}, {arm_str}{body_expr})")
+ }
+ _ => {
+ unreachable!()
+ }
+ };
+ builder.replace(target_range, replace_with);
},
)
}
-fn is_bool_literal_expr(expr: &Option<ast::Expr>, expect_bool: bool) -> bool {
- if let Some(ast::Expr::Literal(lit)) = expr {
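+/// The body of one of the two match arms: either a plain `true`/`false` literal or some
+/// other expression of type `bool`.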
+enum ArmBodyExpression {
+ Literal(bool),
+ Expression(ast::Expr),
+}
+
+fn is_bool_literal_expr(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &ast::Expr,
+) -> Option<ArmBodyExpression> {
+ if let ast::Expr::Literal(lit) = expr {
if let ast::LiteralKind::Bool(b) = lit.kind() {
- return b == expect_bool;
+ return Some(ArmBodyExpression::Literal(b));
}
}
- return false;
+ if !sema.type_of_expr(expr)?.original.is_bool() {
+ return None;
+ }
+
+ Some(ArmBodyExpression::Expression(expr.clone()))
}
#[cfg(test)]
@@ -122,21 +152,6 @@ fn foo(a: Option<u32>) -> bool {
}
#[test]
- fn not_applicable_non_bool_literal_arms() {
- cov_mark::check!(non_invert_bool_literal_arms);
- check_assist_not_applicable(
- convert_two_arm_bool_match_to_matches_macro,
- r#"
-fn foo(a: Option<u32>) -> bool {
- match a$0 {
- Some(val) => val == 3,
- _ => false
- }
-}
- "#,
- );
- }
- #[test]
fn not_applicable_both_false_arms() {
cov_mark::check!(non_invert_bool_literal_arms);
check_assist_not_applicable(
@@ -291,4 +306,40 @@ fn main() {
}",
);
}
+
+ #[test]
+ fn convert_non_literal_bool() {
+ check_assist(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn main() {
+ match 0$0 {
+ a @ 0..15 => a == 0,
+ _ => false,
+ }
+}
+"#,
+ r#"
+fn main() {
+ matches!(0, a @ 0..15 if a == 0)
+}
+"#,
+ );
+ check_assist(
+ convert_two_arm_bool_match_to_matches_macro,
+ r#"
+fn main() {
+ match 0$0 {
+ a @ 0..15 if thing() => a == 0,
+ _ => false,
+ }
+}
+"#,
+ r#"
+fn main() {
+ matches!(0, a @ 0..15 if thing() && a == 0)
+}
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
index ea71d165e..f30ca2552 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs
@@ -114,7 +114,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
let usages = ctx.sema.to_def(&ident_pat).map(|def| {
Definition::Local(def)
.usages(&ctx.sema)
- .in_scope(SearchScope::single_file(ctx.file_id()))
+ .in_scope(&SearchScope::single_file(ctx.file_id()))
.all()
});
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
index 226a5dd9f..ddc8a50ed 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
@@ -27,7 +27,9 @@ use crate::{
pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let comment = ctx.find_token_at_offset::<ast::Comment>()?;
// Only allow doc comments
- let Some(placement) = comment.kind().doc else { return None; };
+ let Some(placement) = comment.kind().doc else {
+ return None;
+ };
// Only allow comments which are alone on their line
if let Some(prev) = comment.syntax().prev_token() {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
index 5c435dd9c..9beb616d9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs
@@ -1,5 +1,5 @@
use either::Either;
-use hir::{AssocItem, HasVisibility, Module, ModuleDef, Name, PathResolution, ScopeDef};
+use hir::{AssocItem, Enum, HasVisibility, Module, ModuleDef, Name, PathResolution, ScopeDef};
use ide_db::{
defs::{Definition, NameRefClass},
search::SearchScope,
@@ -45,7 +45,8 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let use_tree = star.parent().and_then(ast::UseTree::cast)?;
let (parent, mod_path) = find_parent_and_path(&star)?;
let target_module = match ctx.sema.resolve_path(&mod_path)? {
- PathResolution::Def(ModuleDef::Module(it)) => it,
+ PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it),
+ PathResolution::Def(ModuleDef::Adt(hir::Adt::Enum(e))) => Expandable::Enum(e),
_ => return None,
};
@@ -90,6 +91,11 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
)
}
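+/// The target of the glob import being expanded: either a module (importing its items) or
+/// an enum (importing its variants).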
+enum Expandable {
+ Module(Module),
+ Enum(Enum),
+}
+
fn find_parent_and_path(
star: &SyntaxToken,
) -> Option<(Either<ast::UseTree, ast::UseTreeList>, ast::Path)> {
@@ -114,7 +120,7 @@ fn find_parent_and_path(
fn def_is_referenced_in(def: Definition, ctx: &AssistContext<'_>) -> bool {
let search_scope = SearchScope::single_file(ctx.file_id());
- def.usages(&ctx.sema).in_scope(search_scope).at_least_one()
+ def.usages(&ctx.sema).in_scope(&search_scope).at_least_one()
}
#[derive(Debug, Clone)]
@@ -168,23 +174,59 @@ impl Refs {
}
}
-fn find_refs_in_mod(ctx: &AssistContext<'_>, module: Module, visible_from: Module) -> Option<Refs> {
- if !is_mod_visible_from(ctx, module, visible_from) {
+fn find_refs_in_mod(
+ ctx: &AssistContext<'_>,
+ expandable: Expandable,
+ visible_from: Module,
+) -> Option<Refs> {
+ if !is_expandable_visible_from(ctx, &expandable, visible_from) {
return None;
}
- let module_scope = module.scope(ctx.db(), Some(visible_from));
- let refs = module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
- Some(Refs(refs))
+ match expandable {
+ Expandable::Module(module) => {
+ let module_scope = module.scope(ctx.db(), Some(visible_from));
+ let refs =
+ module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
+ Some(Refs(refs))
+ }
+ Expandable::Enum(enm) => Some(Refs(
+ enm.variants(ctx.db())
+ .into_iter()
+ .map(|v| Ref { visible_name: v.name(ctx.db()), def: Definition::Variant(v) })
+ .collect(),
+ )),
+ }
}
-fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool {
- match module.parent(ctx.db()) {
- Some(parent) => {
- module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
- && is_mod_visible_from(ctx, parent, from)
+fn is_expandable_visible_from(
+ ctx: &AssistContext<'_>,
+ expandable: &Expandable,
+ from: Module,
+) -> bool {
+ fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool {
+ match module.parent(ctx.db()) {
+ Some(parent) => {
+ module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
+ && is_mod_visible_from(ctx, parent, from)
+ }
+ None => true,
+ }
+ }
+
+ match expandable {
+ Expandable::Module(module) => match module.parent(ctx.db()) {
+ Some(parent) => {
+ module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
+ && is_mod_visible_from(ctx, parent, from)
+ }
+ None => true,
+ },
+ Expandable::Enum(enm) => {
+ let module = enm.module(ctx.db());
+ enm.visibility(ctx.db()).is_visible_from(ctx.db(), from.into())
+ && is_mod_visible_from(ctx, module, from)
}
- None => true,
}
}
@@ -897,4 +939,98 @@ struct Baz {
",
);
}
+
+ #[test]
+ fn test_support_for_enums() {
+ check_assist(
+ expand_glob_import,
+ r#"
+mod foo {
+ pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+
+use foo::Foo;
+use foo::Foo::*$0;
+
+struct Strukt {
+ bar: Foo,
+}
+
+fn main() {
+ let s: Strukt = Strukt { bar: Bar };
+}"#,
+ r#"
+mod foo {
+ pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+
+use foo::Foo;
+use foo::Foo::Bar;
+
+struct Strukt {
+ bar: Foo,
+}
+
+fn main() {
+ let s: Strukt = Strukt { bar: Bar };
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_expanding_multiple_variants_at_once() {
+ check_assist(
+ expand_glob_import,
+ r#"
+mod foo {
+ pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+
+mod abc {
+ use super::foo;
+ use super::foo::Foo::*$0;
+
+ struct Strukt {
+ baz: foo::Foo,
+ bar: foo::Foo,
+ }
+
+ fn trying_calling() {
+ let s: Strukt = Strukt { bar: Bar , baz : Baz };
+ }
+
+}"#,
+ r#"
+mod foo {
+ pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+
+mod abc {
+ use super::foo;
+ use super::foo::Foo::{Bar, Baz};
+
+ struct Strukt {
+ baz: foo::Foo,
+ bar: foo::Foo,
+ }
+
+ fn trying_calling() {
+ let s: Strukt = Strukt { bar: Bar , baz : Baz };
+ }
+
+}"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
index 2a67909e6..b8b781ea4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs
@@ -384,7 +384,7 @@ impl LocalUsages {
Self(
Definition::Local(var)
.usages(&ctx.sema)
- .in_scope(SearchScope::single_file(ctx.file_id()))
+ .in_scope(&SearchScope::single_file(ctx.file_id()))
.all(),
)
}
@@ -1360,14 +1360,15 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> St
}
format_to!(buf, "{expr}");
- let insert_comma = fun
- .body
- .parent()
- .and_then(ast::MatchArm::cast)
- .map_or(false, |it| it.comma_token().is_none());
+ let parent_match_arm = fun.body.parent().and_then(ast::MatchArm::cast);
+ let insert_comma = parent_match_arm.as_ref().is_some_and(|it| it.comma_token().is_none());
+
if insert_comma {
buf.push(',');
- } else if fun.ret_ty.is_unit() && (!fun.outliving_locals.is_empty() || !expr.is_block_like()) {
+ } else if parent_match_arm.is_none()
+ && fun.ret_ty.is_unit()
+ && (!fun.outliving_locals.is_empty() || !expr.is_block_like())
+ {
buf.push(';');
}
buf
@@ -4611,6 +4612,29 @@ fn $0fun_name() -> i32 {
}
"#,
);
+
+ // Makes sure no semicolon is added for unit-valued match arms
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ match () {
+ _ => $0()$0,
+ }
+}
+"#,
+ r#"
+fn main() {
+ match () {
+ _ => fun_name(),
+ }
+}
+
+fn $0fun_name() {
+ ()
+}
+"#,
+ )
}
#[test]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
index de37f5f13..6839c5820 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -478,7 +478,7 @@ impl Module {
let selection_range = ctx.selection_trimmed();
let curr_file_id = ctx.file_id();
let search_scope = SearchScope::single_file(curr_file_id);
- let usage_res = def.usages(&ctx.sema).in_scope(search_scope).all();
+ let usage_res = def.usages(&ctx.sema).in_scope(&search_scope).all();
let file = ctx.sema.parse(curr_file_id);
let mut exists_inside_sel = false;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
index d6c59a9c8..c9f272474 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -1,11 +1,11 @@
use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef};
use ide_db::base_db::FileId;
use syntax::{
- ast::{self, HasVisibility as _},
- AstNode, TextRange, TextSize,
+ ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
+ AstNode, TextRange,
};
-use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
+use crate::{AssistContext, AssistId, AssistKind, Assists};
// FIXME: this really should be a fix for diagnostic, rather than an assist.
@@ -40,12 +40,16 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
let qualifier = path.qualifier()?;
let name_ref = path.segment()?.name_ref()?;
let qualifier_res = ctx.sema.resolve_path(&qualifier)?;
- let PathResolution::Def(ModuleDef::Module(module)) = qualifier_res else { return None; };
+ let PathResolution::Def(ModuleDef::Module(module)) = qualifier_res else {
+ return None;
+ };
let (_, def) = module
.scope(ctx.db(), None)
.into_iter()
.find(|(name, _)| name.to_smol_str() == name_ref.text().as_str())?;
- let ScopeDef::ModuleDef(def) = def else { return None; };
+ let ScopeDef::ModuleDef(def) = def else {
+ return None;
+ };
let current_module = ctx.sema.scope(path.syntax())?.module();
let target_module = def.module(ctx.db())?;
@@ -54,11 +58,13 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
return None;
};
- let (offset, current_visibility, target, target_file, target_name) =
- target_data_for_def(ctx.db(), def)?;
+ let (vis_owner, target, target_file, target_name) = target_data_for_def(ctx.db(), def)?;
- let missing_visibility =
- if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+ let missing_visibility = if current_module.krate() == target_module.krate() {
+ make::visibility_pub_crate()
+ } else {
+ make::visibility_pub()
+ };
let assist_label = match target_name {
None => format!("Change visibility to {missing_visibility}"),
@@ -67,23 +73,14 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
}
};
- acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
- builder.edit_file(target_file);
- match ctx.config.snippet_cap {
- Some(cap) => match current_visibility {
- Some(current_visibility) => builder.replace_snippet(
- cap,
- current_visibility.syntax().text_range(),
- format!("$0{missing_visibility}"),
- ),
- None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")),
- },
- None => match current_visibility {
- Some(current_visibility) => {
- builder.replace(current_visibility.syntax().text_range(), missing_visibility)
- }
- None => builder.insert(offset, format!("{missing_visibility} ")),
- },
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {
+ edit.edit_file(target_file);
+
+ let vis_owner = edit.make_mut(vis_owner);
+ vis_owner.set_visibility(missing_visibility.clone_for_update());
+
+ if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
+ edit.add_tabstop_before(cap, vis);
}
})
}
@@ -103,19 +100,22 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
let target_module = parent.module(ctx.db());
let in_file_source = record_field_def.source(ctx.db())?;
- let (offset, current_visibility, target) = match in_file_source.value {
+ let (vis_owner, target) = match in_file_source.value {
hir::FieldSource::Named(it) => {
- let s = it.syntax();
- (vis_offset(s), it.visibility(), s.text_range())
+ let range = it.syntax().text_range();
+ (ast::AnyHasVisibility::new(it), range)
}
hir::FieldSource::Pos(it) => {
- let s = it.syntax();
- (vis_offset(s), it.visibility(), s.text_range())
+ let range = it.syntax().text_range();
+ (ast::AnyHasVisibility::new(it), range)
}
};
- let missing_visibility =
- if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+ let missing_visibility = if current_module.krate() == target_module.krate() {
+ make::visibility_pub_crate()
+ } else {
+ make::visibility_pub()
+ };
let target_file = in_file_source.file_id.original_file(ctx.db());
let target_name = record_field_def.name(ctx.db());
@@ -125,23 +125,14 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
target_name.display(ctx.db())
);
- acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
- builder.edit_file(target_file);
- match ctx.config.snippet_cap {
- Some(cap) => match current_visibility {
- Some(current_visibility) => builder.replace_snippet(
- cap,
- current_visibility.syntax().text_range(),
- format!("$0{missing_visibility}"),
- ),
- None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")),
- },
- None => match current_visibility {
- Some(current_visibility) => {
- builder.replace(current_visibility.syntax().text_range(), missing_visibility)
- }
- None => builder.insert(offset, format!("{missing_visibility} ")),
- },
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {
+ edit.edit_file(target_file);
+
+ let vis_owner = edit.make_mut(vis_owner);
+ vis_owner.set_visibility(missing_visibility.clone_for_update());
+
+ if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
+ edit.add_tabstop_before(cap, vis);
}
})
}
@@ -149,11 +140,11 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
fn target_data_for_def(
db: &dyn HirDatabase,
def: hir::ModuleDef,
-) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId, Option<hir::Name>)> {
+) -> Option<(ast::AnyHasVisibility, TextRange, FileId, Option<hir::Name>)> {
fn offset_target_and_file_id<S, Ast>(
db: &dyn HirDatabase,
x: S,
- ) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId)>
+ ) -> Option<(ast::AnyHasVisibility, TextRange, FileId)>
where
S: HasSource<Ast = Ast>,
Ast: AstNode + ast::HasVisibility,
@@ -161,18 +152,12 @@ fn target_data_for_def(
let source = x.source(db)?;
let in_file_syntax = source.syntax();
let file_id = in_file_syntax.file_id;
- let syntax = in_file_syntax.value;
- let current_visibility = source.value.visibility();
- Some((
- vis_offset(syntax),
- current_visibility,
- syntax.text_range(),
- file_id.original_file(db.upcast()),
- ))
+ let range = in_file_syntax.value.text_range();
+ Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db.upcast())))
}
let target_name;
- let (offset, current_visibility, target, target_file) = match def {
+ let (offset, target, target_file) = match def {
hir::ModuleDef::Function(f) => {
target_name = Some(f.name(db));
offset_target_and_file_id(db, f)?
@@ -209,8 +194,8 @@ fn target_data_for_def(
target_name = m.name(db);
let in_file_source = m.declaration_source(db)?;
let file_id = in_file_source.file_id.original_file(db.upcast());
- let syntax = in_file_source.value.syntax();
- (vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id)
+ let range = in_file_source.value.syntax().text_range();
+ (ast::AnyHasVisibility::new(in_file_source.value), range, file_id)
}
// FIXME
hir::ModuleDef::Macro(_) => return None,
@@ -218,7 +203,7 @@ fn target_data_for_def(
hir::ModuleDef::Variant(_) | hir::ModuleDef::BuiltinType(_) => return None,
};
- Some((offset, current_visibility, target, target_file, target_name))
+ Some((offset, target, target_file, target_name))
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
index 860372941..7e4f140a2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -15,6 +15,7 @@ use crate::{
// Generates default implementation from new method.
//
// ```
+// # //- minicore: default
// struct Example { _inner: () }
//
// impl Example {
@@ -54,6 +55,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
}
let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
+ let self_ty = impl_.self_ty()?;
if is_default_implemented(ctx, &impl_) {
cov_mark::hit!(default_block_is_already_present);
cov_mark::hit!(struct_in_module_with_default);
@@ -70,15 +72,19 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
let default_code = " fn default() -> Self {
Self::new()
}";
- let code = generate_trait_impl_text_from_impl(&impl_, "Default", default_code);
+ let code = generate_trait_impl_text_from_impl(&impl_, self_ty, "Default", default_code);
builder.insert(insert_location.end(), code);
},
)
}
// FIXME: based on from utils::generate_impl_text_inner
-fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String {
- let impl_ty = impl_.self_ty().unwrap();
+fn generate_trait_impl_text_from_impl(
+ impl_: &ast::Impl,
+ self_ty: ast::Type,
+ trait_text: &str,
+ code: &str,
+) -> String {
let generic_params = impl_.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
@@ -109,7 +115,7 @@ fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code:
if let Some(generic_params) = &generic_params {
format_to!(buf, "{generic_params}")
}
- format_to!(buf, " {trait_text} for {impl_ty}");
+ format_to!(buf, " {trait_text} for {self_ty}");
match impl_.where_clause() {
Some(where_clause) => {
@@ -136,7 +142,9 @@ fn is_default_implemented(ctx: &AssistContext<'_>, impl_: &Impl) -> bool {
let default = FamousDefs(&ctx.sema, krate).core_default_Default();
let default_trait = match default {
Some(value) => value,
- None => return false,
+ // Return `true` to avoid offering the assist: it makes no sense
+ // to impl `Default` when the `Default` trait itself is missing.
+ None => return true,
};
ty.impls_trait(db, default_trait, &[])
@@ -480,6 +488,7 @@ impl Example {
check_assist_not_applicable(
generate_default_from_new,
r#"
+//- minicore: default
struct Example { _inner: () }
impl Example {
@@ -655,4 +664,23 @@ mod test {
"#,
);
}
+
+ #[test]
+ fn not_applicable_when_default_lang_item_is_missing() {
+ check_assist_not_applicable(
+ generate_default_from_new,
+ r#"
+struct S;
+impl S {
+ fn new$0() -> Self {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn not_applicable_for_missing_self_ty() {
+ // Regression test for #15398.
+ check_assist_not_applicable(generate_default_from_new, "impl { fn new$0() -> Self {} }");
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index b68c766e6..31fc69562 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,13 +1,17 @@
use std::collections::HashSet;
use hir::{self, HasCrate, HasSource, HasVisibility};
-use syntax::ast::{self, make, AstNode, HasGenericParams, HasName, HasVisibility as _};
+use syntax::{
+ ast::{
+ self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
+ },
+ ted,
+};
use crate::{
- utils::{convert_param_list_to_arg_list, find_struct_impl, render_snippet, Cursor},
+ utils::{convert_param_list_to_arg_list, find_struct_impl},
AssistContext, AssistId, AssistKind, Assists, GroupLabel,
};
-use syntax::ast::edit::AstNodeEdit;
// Assist: generate_delegate_methods
//
@@ -88,13 +92,15 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let adt = ast::Adt::Struct(strukt.clone());
let name = name.display(ctx.db()).to_string();
// if `find_struct_impl` returns None, that means that a function named `name` already exists.
- let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else { continue; };
+ let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else {
+ continue;
+ };
acc.add_group(
&GroupLabel("Generate delegate methods…".to_owned()),
AssistId("generate_delegate_methods", AssistKind::Generate),
format!("Generate delegate for `{field_name}.{name}()`",),
target,
- |builder| {
+ |edit| {
// Create the function
let method_source = match method.source(ctx.db()) {
Some(source) => source.value,
@@ -133,36 +139,12 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
is_const,
is_unsafe,
)
- .indent(ast::edit::IndentLevel(1))
.clone_for_update();
- let cursor = Cursor::Before(f.syntax());
-
- // Create or update an impl block, attach the function to it,
- // then insert into our code.
- match impl_def {
- Some(impl_def) => {
- // Remember where in our source our `impl` block lives.
- let impl_def = impl_def.clone_for_update();
- let old_range = impl_def.syntax().text_range();
-
- // Attach the function to the impl block
- let assoc_items = impl_def.get_or_create_assoc_item_list();
- assoc_items.add_item(f.clone().into());
-
- // Update the impl block.
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snippet = render_snippet(cap, impl_def.syntax(), cursor);
- builder.replace_snippet(cap, old_range, snippet);
- }
- None => {
- builder.replace(old_range, impl_def.syntax().to_string());
- }
- }
- }
+ // Get the impl to update, or create one if we need to.
+ let impl_def = match impl_def {
+ Some(impl_def) => edit.make_mut(impl_def),
None => {
- // Attach the function to the impl block
let name = &strukt_name.to_string();
let params = strukt.generic_param_list();
let ty_params = params.clone();
@@ -176,24 +158,34 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
None,
)
.clone_for_update();
- let assoc_items = impl_def.get_or_create_assoc_item_list();
- assoc_items.add_item(f.clone().into());
+
+ // Fixup impl_def indentation
+ let indent = strukt.indent_level();
+ impl_def.reindent_to(indent);
// Insert the impl block.
- match ctx.config.snippet_cap {
- Some(cap) => {
- let offset = strukt.syntax().text_range().end();
- let snippet = render_snippet(cap, impl_def.syntax(), cursor);
- let snippet = format!("\n\n{snippet}");
- builder.insert_snippet(cap, offset, snippet);
- }
- None => {
- let offset = strukt.syntax().text_range().end();
- let snippet = format!("\n\n{}", impl_def.syntax());
- builder.insert(offset, snippet);
- }
- }
+ let strukt = edit.make_mut(strukt.clone());
+ ted::insert_all(
+ ted::Position::after(strukt.syntax()),
+ vec![
+ make::tokens::whitespace(&format!("\n\n{indent}")).into(),
+ impl_def.syntax().clone().into(),
+ ],
+ );
+
+ impl_def
}
+ };
+
+ // Fixup function indentation.
+ // FIXME: Should really be handled by `AssocItemList::add_item`
+ f.reindent_to(impl_def.indent_level() + 1);
+
+ let assoc_items = impl_def.get_or_create_assoc_item_list();
+ assoc_items.add_item(f.clone().into());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ edit.add_tabstop_before(cap, f)
}
},
)?;
@@ -243,6 +235,45 @@ impl Person {
}
#[test]
+ fn test_generate_delegate_create_impl_block_match_indent() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+mod indent {
+ struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+
+ struct Person {
+ ag$0e: Age,
+ }
+}"#,
+ r#"
+mod indent {
+ struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+
+ struct Person {
+ age: Age,
+ }
+
+ impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn test_generate_delegate_update_impl_block() {
check_assist(
generate_delegate_methods,
@@ -280,6 +311,47 @@ impl Person {
}
#[test]
+ fn test_generate_delegate_update_impl_block_match_indent() {
+ check_assist(
+ generate_delegate_methods,
+ r#"
+mod indent {
+ struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+
+ struct Person {
+ ag$0e: Age,
+ }
+
+ impl Person {}
+}"#,
+ r#"
+mod indent {
+ struct Age(u8);
+ impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+ }
+
+ struct Person {
+ age: Age,
+ }
+
+ impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn test_generate_delegate_tuple_struct() {
check_assist(
generate_delegate_methods,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs
new file mode 100644
index 000000000..f4fa6a74c
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -0,0 +1,1051 @@
+use std::ops::Not;
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::convert_param_list_to_arg_list,
+};
+use either::Either;
+use hir::{db::HirDatabase, HasVisibility};
+use ide_db::{
+ assists::{AssistId, GroupLabel},
+ path_transform::PathTransform,
+};
+use syntax::{
+ ast::{
+ self,
+ edit::{self, AstNodeEdit},
+ make, AssocItem, HasGenericParams, HasName, HasVisibility as astHasVisibility, Path,
+ },
+ ted::{self, Position},
+ AstNode, NodeOrToken, SyntaxKind,
+};
+
+// Assist: generate_delegate_trait
+//
+// Generate delegate trait implementation for `StructField`s.
+//
+// ```
+// trait SomeTrait {
+// type T;
+// fn fn_(arg: u32) -> u32;
+// fn method_(&mut self) -> bool;
+// }
+// struct A;
+// impl SomeTrait for A {
+// type T = u32;
+//
+// fn fn_(arg: u32) -> u32 {
+// 42
+// }
+//
+// fn method_(&mut self) -> bool {
+// false
+// }
+// }
+// struct B {
+// a$0: A,
+// }
+// ```
+// ->
+// ```
+// trait SomeTrait {
+// type T;
+// fn fn_(arg: u32) -> u32;
+// fn method_(&mut self) -> bool;
+// }
+// struct A;
+// impl SomeTrait for A {
+// type T = u32;
+//
+// fn fn_(arg: u32) -> u32 {
+// 42
+// }
+//
+// fn method_(&mut self) -> bool {
+// false
+// }
+// }
+// struct B {
+// a: A,
+// }
+//
+// impl SomeTrait for B {
+// type T = <A as SomeTrait>::T;
+//
+// fn fn_(arg: u32) -> u32 {
+// <A as SomeTrait>::fn_(arg)
+// }
+//
+// fn method_(&mut self) -> bool {
+// <A as SomeTrait>::method_( &mut self.a )
+// }
+// }
+// ```
+pub(crate) fn generate_delegate_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let strukt = Struct::new(ctx.find_node_at_offset::<ast::Struct>()?)?;
+
+ let field: Field = match ctx.find_node_at_offset::<ast::RecordField>() {
+ Some(field) => Field::new(&ctx, Either::Left(field))?,
+ None => {
+ let field = ctx.find_node_at_offset::<ast::TupleField>()?;
+ let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?;
+ Field::new(&ctx, either::Right((field, field_list)))?
+ }
+ };
+
+ strukt.delegate(field, acc, ctx);
+ Some(())
+}
+
+/// A utility object that represents a struct's field.
+struct Field {
+ name: String,
+ ty: ast::Type,
+ range: syntax::TextRange,
+ impls: Vec<Delegee>,
+}
+
+impl Field {
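+ /// Collects the field's name, type, and every trait it could delegate: trait bounds on a
+ /// type-parameter field, plus trait impls of the field's type that are visible from here.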
+ pub(crate) fn new(
+ ctx: &AssistContext<'_>,
+ f: Either<ast::RecordField, (ast::TupleField, ast::TupleFieldList)>,
+ ) -> Option<Field> {
+ let db = ctx.sema.db;
+ let name: String;
+ let range: syntax::TextRange;
+ let ty: ast::Type;
+
+ let module = ctx.sema.to_module_def(ctx.file_id())?;
+
+ match f {
+ Either::Left(f) => {
+ name = f.name()?.to_string();
+ ty = f.ty()?;
+ range = f.syntax().text_range();
+ }
+ Either::Right((f, l)) => {
+ name = l.fields().position(|it| it == f)?.to_string();
+ ty = f.ty()?;
+ range = f.syntax().text_range();
+ }
+ };
+
+ let hir_ty = ctx.sema.resolve_type(&ty)?;
+ let type_impls = hir::Impl::all_for_type(db, hir_ty.clone());
+ let mut impls = Vec::with_capacity(type_impls.len());
+ let type_param = hir_ty.as_type_param(db);
+
+ if let Some(tp) = type_param {
+ for tb in tp.trait_bounds(db) {
+ impls.push(Delegee::Bound(BoundCase(tb)));
+ }
+ };
+
+ for imp in type_impls {
+ match imp.trait_(db) {
+ Some(tr) => {
+ if tr.is_visible_from(db, module) {
+ impls.push(Delegee::Impls(ImplCase(tr, imp)))
+ }
+ }
+ None => (),
+ }
+ }
+
+ Some(Field { name, ty, range, impls })
+ }
+}
+
+/// A field that we want to delegate to can offer the enclosing struct
+/// a trait to implement in two ways: either the field's type itself
+/// implements the trait, or the field's type is a type parameter that
+/// is bound by the trait. We handle these cases differently, hence the
+/// enum.
+enum Delegee {
+ Bound(BoundCase),
+ Impls(ImplCase),
+}
+
+struct BoundCase(hir::Trait);
+struct ImplCase(hir::Trait, hir::Impl);
+
+impl Delegee {
+ fn signature(&self, db: &dyn HirDatabase) -> String {
+ let mut s = String::new();
+
+ let (Delegee::Bound(BoundCase(it)) | Delegee::Impls(ImplCase(it, _))) = self;
+
+ for m in it.module(db).path_to_root(db).iter().rev() {
+ if let Some(name) = m.name(db) {
+ s.push_str(&format!("{}::", name.to_smol_str()));
+ }
+ }
+
+ s.push_str(&it.name(db).to_smol_str());
+ s
+ }
+}
+
+/// A utility struct that represents the enclosing struct.
+struct Struct {
+ strukt: ast::Struct,
+ name: ast::Name,
+}
+
+impl Struct {
+ pub(crate) fn new(s: ast::Struct) -> Option<Self> {
+ let name = s.name()?;
+ Some(Struct { name, strukt: s })
+ }
+
+ pub(crate) fn delegate(&self, field: Field, acc: &mut Assists, ctx: &AssistContext<'_>) {
+ let db = ctx.db();
+ for delegee in &field.impls {
+ // FIXME: We could omit traits that are already implemented,
+ // but we don't know what the `&[hir::Type]` argument should look like.
+
+ // let trait_ = match delegee {
+ // Delegee::Bound(b) => b.0,
+ // Delegee::Impls(i) => i.1,
+ // };
+
+ // if self.hir_ty.impls_trait(db, trait_, &[]) {
+ // continue;
+ // }
+ let signature = delegee.signature(db);
+ let Some(delegate) = generate_impl(ctx, self, &field.ty, &field.name, delegee) else {
+ continue;
+ };
+
+ acc.add_group(
+ &GroupLabel("Delegate trait impl for field...".to_owned()),
+ AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate),
+ format!("Generate delegate impl `{}` for `{}`", signature, field.name),
+ field.range,
+ |builder| {
+ builder.insert(
+ self.strukt.syntax().text_range().end(),
+ format!("\n\n{}", delegate.syntax()),
+ );
+ },
+ );
+ }
+ }
+}
+
+fn generate_impl(
+ ctx: &AssistContext<'_>,
+ strukt: &Struct,
+ field_ty: &ast::Type,
+ field_name: &String,
+ delegee: &Delegee,
+) -> Option<ast::Impl> {
+ let delegate: ast::Impl;
+ let source: ast::Impl;
+ let genpar: Option<ast::GenericParamList>;
+ let db = ctx.db();
+ let base_path = make::path_from_text(&field_ty.to_string().as_str());
+ let s_path = make::ext::ident_path(&strukt.name.to_string());
+
+ match delegee {
+ Delegee::Bound(delegee) => {
+ let in_file = ctx.sema.source(delegee.0.to_owned())?;
+ let source: ast::Trait = in_file.value;
+
+ delegate = make::impl_trait(
+ delegee.0.is_unsafe(db),
+ None,
+ None,
+ strukt.strukt.generic_param_list(),
+ None,
+ delegee.0.is_auto(db),
+ make::ty(&delegee.0.name(db).to_smol_str()),
+ make::ty_path(s_path),
+ source.where_clause(),
+ strukt.strukt.where_clause(),
+ None,
+ )
+ .clone_for_update();
+
+ genpar = source.generic_param_list();
+ let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
+ let gen_args: String =
+ genpar.map_or_else(String::new, |params| params.to_generic_args().to_string());
+
+ // See https://doc.rust-lang.org/reference/paths.html#qualified-paths
+ let qualified_path_type = make::path_from_text(&format!(
+ "<{} as {}{}>",
+ base_path.to_string(),
+ delegee.0.name(db).to_smol_str(),
+ gen_args.to_string()
+ ));
+
+ match source.assoc_item_list() {
+ Some(ai) => {
+ ai.assoc_items()
+ .filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
+ .for_each(|item| {
+ let assoc =
+ process_assoc_item(item, qualified_path_type.clone(), &field_name);
+ if let Some(assoc) = assoc {
+ delegate_assoc_items.add_item(assoc);
+ }
+ });
+ }
+ None => {}
+ };
+
+ let target = ctx.sema.scope(strukt.strukt.syntax())?;
+ let source = ctx.sema.scope(source.syntax())?;
+
+ let transform =
+ PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone());
+ transform.apply(&delegate.syntax());
+ }
+ Delegee::Impls(delegee) => {
+ let in_file = ctx.sema.source(delegee.1.to_owned())?;
+ source = in_file.value;
+ delegate = make::impl_trait(
+ delegee.0.is_unsafe(db),
+ source.generic_param_list(),
+ None,
+ None,
+ None,
+ delegee.0.is_auto(db),
+ make::ty(&delegee.0.name(db).to_smol_str()),
+ make::ty_path(s_path),
+ source.where_clause(),
+ strukt.strukt.where_clause(),
+ None,
+ )
+ .clone_for_update();
+ genpar = source.generic_param_list();
+ let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
+ let gen_args: String =
+ genpar.map_or_else(String::new, |params| params.to_generic_args().to_string());
+
+ // See https://doc.rust-lang.org/reference/paths.html#qualified-paths
+ let qualified_path_type = make::path_from_text(&format!(
+ "<{} as {}{}>",
+ base_path.to_string().as_str(),
+ delegee.0.name(db).to_smol_str(),
+ gen_args.to_string().as_str()
+ ));
+
+ source
+ .get_or_create_assoc_item_list()
+ .assoc_items()
+ .filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
+ .for_each(|item| {
+ let assoc = process_assoc_item(item, qualified_path_type.clone(), &field_name);
+ if let Some(assoc) = assoc {
+ delegate_assoc_items.add_item(assoc);
+ }
+ });
+
+ let target = ctx.sema.scope(strukt.strukt.syntax())?;
+ let source = ctx.sema.scope(source.syntax())?;
+
+ let transform =
+ PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone());
+ transform.apply(&delegate.syntax());
+ }
+ }
+
+ Some(delegate)
+}
+
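+/// Builds the delegating counterpart of one associated item of the source trait or impl,
+/// forwarding through the qualified path `qual_path_ty` (e.g. `<Base as Trait>`).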
+fn process_assoc_item(
+ item: syntax::ast::AssocItem,
+ qual_path_ty: ast::Path,
+ base_name: &str,
+) -> Option<ast::AssocItem> {
+ match item {
+ AssocItem::Const(c) => const_assoc_item(c, qual_path_ty),
+ AssocItem::Fn(f) => func_assoc_item(f, qual_path_ty, base_name),
+ AssocItem::MacroCall(_) => {
+ // FIXME : Handle MacroCall case.
+ // macro_assoc_item(mac, qual_path_ty)
+ None
+ }
+ AssocItem::TypeAlias(ta) => ty_assoc_item(ta, qual_path_ty),
+ }
+}
+
+fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> Option<AssocItem> {
+ let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
+
+ // We want rhs of the const assignment to be a qualified path
+ // The general case for const assigment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`)
+ // The qualified will have the following generic syntax :
+ // <Base as Trait<GenArgs>>::ConstName;
+ // FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it.
+ // make::path_qualified(qual_path_ty, path_expr_segment.as_single_segment().unwrap());
+ let qualpath = qualpath(qual_path_ty, path_expr_segment);
+ let inner =
+ make::item_const(item.visibility(), item.name()?, item.ty()?, make::expr_path(qualpath))
+ .clone_for_update();
+
+ Some(AssocItem::Const(inner))
+}
+
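+/// Builds a delegating `fn`: the body calls the item through `qual_path_ty`, inserting the
+/// receiver (`self.<field>`, possibly behind `&` or `&mut`) as the first argument when the
+/// source item is a method.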
+fn func_assoc_item(
+ item: syntax::ast::Fn,
+ qual_path_ty: Path,
+ base_name: &str,
+) -> Option<AssocItem> {
+ let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
+ let qualpath = qualpath(qual_path_ty, path_expr_segment);
+
+ let call = match item.param_list() {
+ // Methods and funcs should be handled separately.
+ // We ask if the func has a `self` param.
+ Some(l) => match l.self_param() {
+ Some(slf) => {
+ let mut self_kw = make::expr_path(make::path_from_text("self"));
+ self_kw = make::expr_field(self_kw, base_name);
+
+ let tail_expr_self = match slf.kind() {
+ ast::SelfParamKind::Owned => self_kw,
+ ast::SelfParamKind::Ref => make::expr_ref(self_kw, false),
+ ast::SelfParamKind::MutRef => make::expr_ref(self_kw, true),
+ };
+
+ let param_count = l.params().count();
+ let args = convert_param_list_to_arg_list(l).clone_for_update();
+
+ if param_count > 0 {
+ // Insert the receiver (`self.<field>`) followed by a comma token
+ ted::insert_all(
+ Position::after(args.l_paren_token()?),
+ vec![
+ NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()),
+ NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)),
+ NodeOrToken::Token(make::token(SyntaxKind::COMMA)),
+ ],
+ );
+ } else {
+ // Insert the receiver (`self.<field>`) only
+ ted::insert(
+ Position::after(args.l_paren_token()?),
+ NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()),
+ );
+ }
+
+ make::expr_call(make::expr_path(qualpath), args)
+ }
+ None => make::expr_call(make::expr_path(qualpath), convert_param_list_to_arg_list(l)),
+ },
+ None => make::expr_call(
+ make::expr_path(qualpath),
+ convert_param_list_to_arg_list(make::param_list(None, Vec::new())),
+ ),
+ }
+ .clone_for_update();
+
+ let body = make::block_expr(vec![], Some(call)).clone_for_update();
+ let func = make::fn_(
+ item.visibility(),
+ item.name()?,
+ item.generic_param_list(),
+ item.where_clause(),
+ item.param_list()?,
+ body,
+ item.ret_type(),
+ item.async_token().is_some(),
+ item.const_token().is_some(),
+ item.unsafe_token().is_some(),
+ )
+ .clone_for_update();
+
+ Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update()))
+}
+
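+/// Builds a delegating type alias: `type Name = <qual_path_ty>::Name;`.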
+fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<AssocItem> {
+ let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
+ let qualpath = qualpath(qual_path_ty, path_expr_segment);
+ let ty = make::ty_path(qualpath);
+ let ident = item.name()?.to_string();
+
+ let alias = make::ty_alias(
+ ident.as_str(),
+ item.generic_param_list(),
+ None,
+ item.where_clause(),
+ Some((ty, None)),
+ )
+ .clone_for_update();
+
+ Some(AssocItem::TypeAlias(alias))
+}
+
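+/// Joins a qualified path type and an item path, e.g. `<Base as Trait>` and `item` become
+/// `<Base as Trait>::item`.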
+fn qualpath(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
+ make::path_from_text(&format!("{}::{}", qual_path_ty.to_string(), path_expr_seg.to_string()))
+}
+
+#[cfg(test)]
+mod test {
+
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_tuple_struct_basic() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S(B$0ase);
+trait Trait {}
+impl Trait for Base {}
+"#,
+ r#"
+struct Base;
+struct S(Base);
+
+impl Trait for S {}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_struct_basic() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ ba$0se : Base
+}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ r#"
+struct Base;
+struct S {
+ base : Base
+}
+
+impl Trait for S {}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ )
+ }
+
+ // By definition, structs need to be populated with fields; however, the
+ // user can invoke this assist while still editing, so we assert that it
+ // is not applicable in that case.
+ #[test]
+ fn test_yet_empty_struct() {
+ check_assist_not_applicable(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ $0
+}
+
+impl Trait for S {}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_yet_unspecified_field_type() {
+ check_assist_not_applicable(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ ab$0c
+}
+
+impl Trait for S {}
+trait Trait {}
+impl Trait for Base {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_unsafe_trait() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ ba$0se : Base
+}
+unsafe trait Trait {}
+unsafe impl Trait for Base {}
+"#,
+ r#"
+struct Base;
+struct S {
+ base : Base
+}
+
+unsafe impl Trait for S {}
+unsafe trait Trait {}
+unsafe impl Trait for Base {}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_unsafe_trait_with_unsafe_fn() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct Base;
+struct S {
+ ba$0se: Base,
+}
+
+unsafe trait Trait {
+ unsafe fn a_func();
+ unsafe fn a_method(&self);
+}
+unsafe impl Trait for Base {
+ unsafe fn a_func() {}
+ unsafe fn a_method(&self) {}
+}
+"#,
+ r#"
+struct Base;
+struct S {
+ base: Base,
+}
+
+unsafe impl Trait for S {
+ unsafe fn a_func() {
+ <Base as Trait>::a_func()
+ }
+
+ unsafe fn a_method(&self) {
+ <Base as Trait>::a_method( &self.base )
+ }
+}
+
+unsafe trait Trait {
+ unsafe fn a_func();
+ unsafe fn a_method(&self);
+}
+unsafe impl Trait for Base {
+ unsafe fn a_func() {}
+ unsafe fn a_method(&self) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_with_where_clause() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait AnotherTrait {}
+struct S<T>
+where
+ T: AnotherTrait,
+{
+ b$0 : T,
+}"#,
+ r#"
+trait AnotherTrait {}
+struct S<T>
+where
+ T: AnotherTrait,
+{
+ b : T,
+}
+
+impl<T> AnotherTrait for S<T>
+where
+ T: AnotherTrait,
+{}"#,
+ );
+ }
+
+ #[test]
+ fn test_complex_without_where() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait Trait<'a, T, const C: usize> {
+ type AssocType;
+ const AssocConst: usize;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ field$0: Base
+}
+
+impl<'a, T, const C: usize> Trait<'a, T, C> for Base {
+ type AssocType = ();
+ const AssocConst: usize = 0;
+ fn assoc_fn(p: ()) {}
+ fn assoc_method(&self, p: ()) {}
+}
+"#,
+ r#"
+trait Trait<'a, T, const C: usize> {
+ type AssocType;
+ const AssocConst: usize;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ field: Base
+}
+
+impl<'a, T, const C: usize> Trait<'a, T, C> for S {
+ type AssocType = <Base as Trait<'a, T, C>>::AssocType;
+
+ const AssocConst: usize = <Base as Trait<'a, T, C>>::AssocConst;
+
+ fn assoc_fn(p: ()) {
+ <Base as Trait<'a, T, C>>::assoc_fn(p)
+ }
+
+ fn assoc_method(&self, p: ()) {
+ <Base as Trait<'a, T, C>>::assoc_method( &self.field , p)
+ }
+}
+
+impl<'a, T, const C: usize> Trait<'a, T, C> for Base {
+ type AssocType = ();
+ const AssocConst: usize = 0;
+ fn assoc_fn(p: ()) {}
+ fn assoc_method(&self, p: ()) {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_complex_two() {
+ check_assist(
+ generate_delegate_trait,
+ r"
+trait AnotherTrait {}
+
+trait Trait<'a, T, const C: usize> {
+ type AssocType;
+ const AssocConst: usize;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ fi$0eld: Base,
+}
+
+impl<'b, C, const D: usize> Trait<'b, C, D> for Base
+where
+ C: AnotherTrait,
+{
+ type AssocType = ();
+ const AssocConst: usize = 0;
+ fn assoc_fn(p: ()) {}
+ fn assoc_method(&self, p: ()) {}
+}",
+ r#"
+trait AnotherTrait {}
+
+trait Trait<'a, T, const C: usize> {
+ type AssocType;
+ const AssocConst: usize;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ field: Base,
+}
+
+impl<'b, C, const D: usize> Trait<'b, C, D> for S
+where
+ C: AnotherTrait,
+{
+ type AssocType = <Base as Trait<'b, C, D>>::AssocType;
+
+ const AssocConst: usize = <Base as Trait<'b, C, D>>::AssocConst;
+
+ fn assoc_fn(p: ()) {
+ <Base as Trait<'b, C, D>>::assoc_fn(p)
+ }
+
+ fn assoc_method(&self, p: ()) {
+ <Base as Trait<'b, C, D>>::assoc_method( &self.field , p)
+ }
+}
+
+impl<'b, C, const D: usize> Trait<'b, C, D> for Base
+where
+ C: AnotherTrait,
+{
+ type AssocType = ();
+ const AssocConst: usize = 0;
+ fn assoc_fn(p: ()) {}
+ fn assoc_method(&self, p: ()) {}
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_complex_three() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait AnotherTrait {}
+trait YetAnotherTrait {}
+
+struct StructImplsAll();
+impl AnotherTrait for StructImplsAll {}
+impl YetAnotherTrait for StructImplsAll {}
+
+trait Trait<'a, T, const C: usize> {
+ type A;
+ const ASSOC_CONST: usize = C;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ fi$0eld: Base,
+}
+
+impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for Base
+where
+ A: AnotherTrait,
+{
+ type A = i32;
+
+ const ASSOC_CONST: usize = B;
+
+ fn assoc_fn(p: ()) {}
+
+ fn assoc_method(&self, p: ()) {}
+}
+"#,
+ r#"
+trait AnotherTrait {}
+trait YetAnotherTrait {}
+
+struct StructImplsAll();
+impl AnotherTrait for StructImplsAll {}
+impl YetAnotherTrait for StructImplsAll {}
+
+trait Trait<'a, T, const C: usize> {
+ type A;
+ const ASSOC_CONST: usize = C;
+ fn assoc_fn(p: ());
+ fn assoc_method(&self, p: ());
+}
+
+struct Base;
+struct S {
+ field: Base,
+}
+
+impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for S
+where
+ A: AnotherTrait,
+{
+ type A = <Base as Trait<'b, A, B>>::A;
+
+ const ASSOC_CONST: usize = <Base as Trait<'b, A, B>>::ASSOC_CONST;
+
+ fn assoc_fn(p: ()) {
+ <Base as Trait<'b, A, B>>::assoc_fn(p)
+ }
+
+ fn assoc_method(&self, p: ()) {
+ <Base as Trait<'b, A, B>>::assoc_method( &self.field , p)
+ }
+}
+
+impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for Base
+where
+ A: AnotherTrait,
+{
+ type A = i32;
+
+ const ASSOC_CONST: usize = B;
+
+ fn assoc_fn(p: ()) {}
+
+ fn assoc_method(&self, p: ()) {}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_type_bound() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait AnotherTrait {}
+struct S<T>
+where
+ T: AnotherTrait,
+{
+ b$0: T,
+}"#,
+ r#"
+trait AnotherTrait {}
+struct S<T>
+where
+ T: AnotherTrait,
+{
+ b: T,
+}
+
+impl<T> AnotherTrait for S<T>
+where
+ T: AnotherTrait,
+{}"#,
+ );
+ }
+
+ #[test]
+ fn test_docstring_example() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a$0: A,
+}
+"#,
+ r#"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a: A,
+}
+
+impl SomeTrait for B {
+ type T = <A as SomeTrait>::T;
+
+ fn fn_(arg: u32) -> u32 {
+ <A as SomeTrait>::fn_(arg)
+ }
+
+ fn method_(&mut self) -> bool {
+ <A as SomeTrait>::method_( &mut self.a )
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn import_from_other_mod() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+mod some_module {
+ pub trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+ }
+ pub struct A;
+ impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+ }
+}
+
+struct B {
+ a$0: some_module::A,
+}"#,
+ r#"
+mod some_module {
+ pub trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+ }
+ pub struct A;
+ impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+ }
+}
+
+struct B {
+ a: some_module::A,
+}
+
+impl some_module::SomeTrait for B {
+ type T = <some_module::A as some_module::SomeTrait>::T;
+
+ fn fn_(arg: u32) -> u32 {
+ <some_module::A as some_module::SomeTrait>::fn_(arg)
+ }
+
+ fn method_(&mut self) -> bool {
+ <some_module::A as some_module::SomeTrait>::method_( &mut self.a )
+ }
+}"#,
+ )
+ }
+}
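The expected outputs above all follow the same delegation pattern: the wrapper type implements the trait by forwarding each associated item to the wrapped field through fully qualified syntax. A minimal plain-Rust sketch of that pattern (the trait and type names below are invented for illustration and are not taken from the test fixtures):

trait Greet {
    fn hello(&self) -> String;
}

struct Base;
impl Greet for Base {
    fn hello(&self) -> String {
        "hi".to_string()
    }
}

struct Wrapper {
    field: Base,
}

impl Greet for Wrapper {
    fn hello(&self) -> String {
        // Forward to the wrapped field, just like the generated
        // `<Base as Trait<..>>::assoc_method(&self.field, ..)` calls above.
        <Base as Greet>::hello(&self.field)
    }
}

fn main() {
    assert_eq!(Wrapper { field: Base }.hello(), "hi");
}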
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
index 78ac2eb30..747f70f9f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs
@@ -1,7 +1,6 @@
use syntax::{
- ast::{self, edit::IndentLevel, AstNode, HasAttrs},
- SyntaxKind::{COMMENT, WHITESPACE},
- TextSize,
+ ast::{self, edit_in_place::AttrsOwnerEdit, make, AstNode, HasAttrs},
+ T,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -27,48 +26,37 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let cap = ctx.config.snippet_cap?;
let nominal = ctx.find_node_at_offset::<ast::Adt>()?;
- let node_start = derive_insertion_offset(&nominal)?;
let target = nominal.syntax().text_range();
- acc.add(
- AssistId("generate_derive", AssistKind::Generate),
- "Add `#[derive]`",
- target,
- |builder| {
- let derive_attr = nominal
- .attrs()
- .filter_map(|x| x.as_simple_call())
- .filter(|(name, _arg)| name == "derive")
- .map(|(_name, arg)| arg)
- .next();
- match derive_attr {
- None => {
- let indent_level = IndentLevel::from_node(nominal.syntax());
- builder.insert_snippet(
- cap,
- node_start,
- format!("#[derive($0)]\n{indent_level}"),
- );
- }
- Some(tt) => {
- // Just move the cursor.
- builder.insert_snippet(
- cap,
- tt.syntax().text_range().end() - TextSize::of(')'),
- "$0",
- )
- }
- };
- },
- )
-}
+ acc.add(AssistId("generate_derive", AssistKind::Generate), "Add `#[derive]`", target, |edit| {
+ let derive_attr = nominal
+ .attrs()
+ .filter_map(|x| x.as_simple_call())
+ .filter(|(name, _arg)| name == "derive")
+ .map(|(_name, arg)| arg)
+ .next();
+ match derive_attr {
+ None => {
+ let derive = make::attr_outer(make::meta_token_tree(
+ make::ext::ident_path("derive"),
+ make::token_tree(T!['('], vec![]).clone_for_update(),
+ ))
+ .clone_for_update();
+
+ let nominal = edit.make_mut(nominal);
+ nominal.add_attr(derive.clone());
-// Insert `derive` after doc comments.
-fn derive_insertion_offset(nominal: &ast::Adt) -> Option<TextSize> {
- let non_ws_child = nominal
- .syntax()
- .children_with_tokens()
- .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
- Some(non_ws_child.text_range().start())
+ edit.add_tabstop_before_token(
+ cap,
+ derive.meta().unwrap().token_tree().unwrap().r_paren_token().unwrap(),
+ );
+ }
+ Some(tt) => {
+ // Just move the cursor.
+ let tt = edit.make_mut(tt);
+ edit.add_tabstop_before_token(cap, tt.right_delimiter_token().unwrap());
+ }
+ };
+ })
}
#[cfg(test)]
@@ -115,6 +103,38 @@ mod m {
}
#[test]
+ fn add_derive_existing_with_brackets() {
+ check_assist(
+ generate_derive,
+ "
+#[derive[Clone]]
+struct Foo { a: i32$0, }
+",
+ "
+#[derive[Clone$0]]
+struct Foo { a: i32, }
+",
+ );
+ }
+
+ #[test]
+ fn add_derive_existing_missing_delimiter() {
+ // Since `#[derive]` isn't a simple attr call (i.e. `#[derive()]`),
+ // we don't consider it a proper derive attr and generate a new
+ // one instead.
+ check_assist(
+ generate_derive,
+ "
+#[derive]
+struct Foo { a: i32$0, }",
+ "
+#[derive]
+#[derive($0)]
+struct Foo { a: i32, }",
+ );
+ }
+
+ #[test]
fn add_derive_new_with_doc_comment() {
check_assist(
generate_derive,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index c579f6780..5b13e01b1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -623,7 +623,9 @@ fn fn_generic_params(
fn params_and_where_preds_in_scope(
ctx: &AssistContext<'_>,
) -> (Vec<ast::GenericParam>, Vec<ast::WherePred>) {
- let Some(body) = containing_body(ctx) else { return Default::default(); };
+ let Some(body) = containing_body(ctx) else {
+ return Default::default();
+ };
let mut generic_params = Vec::new();
let mut where_clauses = Vec::new();
@@ -1876,7 +1878,6 @@ where
#[test]
fn add_function_with_fn_arg() {
- // FIXME: The argument in `bar` is wrong.
check_assist(
generate_function,
r"
@@ -1897,7 +1898,7 @@ fn foo() {
bar(Baz::new);
}
-fn bar(new: fn) ${0:-> _} {
+fn bar(new: fn() -> Baz) ${0:-> _} {
todo!()
}
",
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
index dd6bbd84a..9c9478b04 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs
@@ -1,4 +1,4 @@
-use ide_db::famous_defs::FamousDefs;
+use ide_db::{famous_defs::FamousDefs, source_change::SourceChangeBuilder};
use stdx::{format_to, to_lower_snake_case};
use syntax::{
ast::{self, AstNode, HasName, HasVisibility},
@@ -10,6 +10,66 @@ use crate::{
AssistContext, AssistId, AssistKind, Assists, GroupLabel,
};
+// Assist: generate_setter
+//
+// Generate a setter method.
+//
+// ```
+// struct Person {
+// nam$0e: String,
+// }
+// ```
+// ->
+// ```
+// struct Person {
+// name: String,
+// }
+//
+// impl Person {
+// fn $0set_name(&mut self, name: String) {
+// self.name = name;
+// }
+// }
+// ```
+pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // This assist can work in two modes:
+ // - acting upon a selection of record fields
+ // - acting upon a single record field
+ //
+ // `extract_and_parse` below is the only place where the implementation
+ // diverges; the subsequent code is generic for both modes.
+
+ let (strukt, info_of_record_fields, mut fn_names) = extract_and_parse(ctx, AssistType::Set)?;
+
+ // No record fields to do work on :(
+ if info_of_record_fields.len() == 0 {
+ return None;
+ }
+
+ // Prepend set_ to fn names.
+ fn_names.iter_mut().for_each(|name| *name = format!("set_{}", name));
+
+ // Return early if we've found an existing fn
+ let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?;
+
+ // Computing collective text range of all record fields in selected region
+ let target: TextRange = info_of_record_fields
+ .iter()
+ .map(|record_field_info| record_field_info.target)
+ .reduce(|acc, target| acc.cover(target))?;
+
+ let setter_info = AssistInfo { impl_def, strukt, assist_type: AssistType::Set };
+
+ acc.add_group(
+ &GroupLabel("Generate getter/setter".to_owned()),
+ AssistId("generate_setter", AssistKind::Generate),
+ "Generate a setter method",
+ target,
+ |builder| build_source_change(builder, ctx, info_of_record_fields, setter_info),
+ );
+ Some(())
+}
+
// Assist: generate_getter
//
// Generate a getter method.
@@ -83,10 +143,16 @@ struct RecordFieldInfo {
target: TextRange,
}
-struct GetterInfo {
+struct AssistInfo {
impl_def: Option<ast::Impl>,
strukt: ast::Struct,
- mutable: bool,
+ assist_type: AssistType,
+}
+
+enum AssistType {
+ Get,
+ MutGet,
+ Set,
}
pub(crate) fn generate_getter_impl(
@@ -94,40 +160,8 @@ pub(crate) fn generate_getter_impl(
ctx: &AssistContext<'_>,
mutable: bool,
) -> Option<()> {
- // This if condition denotes two modes this assist can work in:
- // - First is acting upon selection of record fields
- // - Next is acting upon a single record field
- //
- // This is the only part where implementation diverges a bit,
- // subsequent code is generic for both of these modes
-
- let (strukt, info_of_record_fields, fn_names) = if !ctx.has_empty_selection() {
- // Selection Mode
- let node = ctx.covering_element();
-
- let node = match node {
- syntax::NodeOrToken::Node(n) => n,
- syntax::NodeOrToken::Token(t) => t.parent()?,
- };
-
- let parent_struct = node.ancestors().find_map(ast::Struct::cast)?;
-
- let (info_of_record_fields, field_names) =
- extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), mutable)?;
-
- (parent_struct, info_of_record_fields, field_names)
- } else {
- // Single Record Field mode
- let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
- let field = ctx.find_node_at_offset::<ast::RecordField>()?;
-
- let record_field_info = parse_record_field(field, mutable)?;
-
- let fn_name = record_field_info.fn_name.clone();
-
- (strukt, vec![record_field_info], vec![fn_name])
- };
-
+ let (strukt, info_of_record_fields, fn_names) =
+ extract_and_parse(ctx, if mutable { AssistType::MutGet } else { AssistType::Get })?;
// No record fields to do work on :(
if info_of_record_fields.len() == 0 {
return None;
@@ -147,98 +181,30 @@ pub(crate) fn generate_getter_impl(
.map(|record_field_info| record_field_info.target)
.reduce(|acc, target| acc.cover(target))?;
- let getter_info = GetterInfo { impl_def, strukt, mutable };
+ let getter_info = AssistInfo {
+ impl_def,
+ strukt,
+ assist_type: if mutable { AssistType::MutGet } else { AssistType::Get },
+ };
acc.add_group(
&GroupLabel("Generate getter/setter".to_owned()),
AssistId(id, AssistKind::Generate),
label,
target,
- |builder| {
- let record_fields_count = info_of_record_fields.len();
-
- let mut buf = String::with_capacity(512);
-
- // Check if an impl exists
- if let Some(impl_def) = &getter_info.impl_def {
- // Check if impl is empty
- if let Some(assoc_item_list) = impl_def.assoc_item_list() {
- if assoc_item_list.assoc_items().next().is_some() {
- // If not empty then only insert a new line
- buf.push('\n');
- }
- }
- }
-
- for (i, record_field_info) in info_of_record_fields.iter().enumerate() {
- // this buf inserts a newline at the end of a getter
- // automatically, if one wants to add one more newline
- // for separating it from other assoc items, that needs
- // to be handled separately
- let mut getter_buf =
- generate_getter_from_info(ctx, &getter_info, record_field_info);
-
- // Insert `$0` only for last getter we generate
- if i == record_fields_count - 1 {
- if ctx.config.snippet_cap.is_some() {
- getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
- }
- }
-
- // For first element we do not merge with '\n', as
- // that can be inserted by impl_def check defined
- // above, for other cases which are:
- //
- // - impl exists but it empty, here we would ideally
- // not want to keep newline between impl <struct> {
- // and fn <fn-name>() { line
- //
- // - next if impl itself does not exist, in this
- // case we ourselves generate a new impl and that
- // again ends up with the same reasoning as above
- // for not keeping newline
- if i == 0 {
- buf = buf + &getter_buf;
- } else {
- buf = buf + "\n" + &getter_buf;
- }
-
- // We don't insert a new line at the end of
- // last getter as it will end up in the end
- // of an impl where we would not like to keep
- // getter and end of impl ( i.e. `}` ) with an
- // extra line for no reason
- if i < record_fields_count - 1 {
- buf = buf + "\n";
- }
- }
-
- let start_offset = getter_info
- .impl_def
- .as_ref()
- .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf))
- .unwrap_or_else(|| {
- buf = generate_impl_text(&ast::Adt::Struct(getter_info.strukt.clone()), &buf);
- getter_info.strukt.syntax().text_range().end()
- });
-
- match ctx.config.snippet_cap {
- Some(cap) => builder.insert_snippet(cap, start_offset, buf),
- None => builder.insert(start_offset, buf),
- }
- },
+ |builder| build_source_change(builder, ctx, info_of_record_fields, getter_info),
)
}
fn generate_getter_from_info(
ctx: &AssistContext<'_>,
- info: &GetterInfo,
+ info: &AssistInfo,
record_field_info: &RecordFieldInfo,
) -> String {
let mut buf = String::with_capacity(512);
let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{v} "));
- let (ty, body) = if info.mutable {
+ let (ty, body) = if matches!(info.assist_type, AssistType::MutGet) {
(
format!("&mut {}", record_field_info.field_ty),
format!("&mut self.{}", record_field_info.field_name),
@@ -273,7 +239,7 @@ fn generate_getter_from_info(
}}",
vis,
record_field_info.fn_name,
- info.mutable.then_some("mut ").unwrap_or_default(),
+ matches!(info.assist_type, AssistType::MutGet).then_some("mut ").unwrap_or_default(),
ty,
body,
);
@@ -281,10 +247,58 @@ fn generate_getter_from_info(
buf
}
+fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> String {
+ let mut buf = String::with_capacity(512);
+ let strukt = &info.strukt;
+ let fn_name = &record_field_info.fn_name;
+ let field_ty = &record_field_info.field_ty;
+ let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} "));
+ format_to!(
+ buf,
+ " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{
+ self.{fn_name} = {fn_name};
+ }}"
+ );
+
+ buf
+}
+
+fn extract_and_parse(
+ ctx: &AssistContext<'_>,
+ assist_type: AssistType,
+) -> Option<(ast::Struct, Vec<RecordFieldInfo>, Vec<String>)> {
+ // This if condition distinguishes the two modes these assists can work in:
+ // - acting upon a selection of record fields
+ // - acting upon a single record field
+ if !ctx.has_empty_selection() {
+ // Selection Mode
+ let node = ctx.covering_element();
+
+ let node = match node {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ let parent_struct = node.ancestors().find_map(ast::Struct::cast)?;
+
+ let (info_of_record_fields, field_names) =
+ extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), &assist_type)?;
+
+ return Some((parent_struct, info_of_record_fields, field_names));
+ }
+
+ // Single Record Field mode
+ let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
+ let field = ctx.find_node_at_offset::<ast::RecordField>()?;
+ let record_field_info = parse_record_field(field, &assist_type)?;
+ let fn_name = record_field_info.fn_name.clone();
+ Some((strukt, vec![record_field_info], vec![fn_name]))
+}
+
fn extract_and_parse_record_fields(
node: &ast::Struct,
selection_range: TextRange,
- mutable: bool,
+ assist_type: &AssistType,
) -> Option<(Vec<RecordFieldInfo>, Vec<String>)> {
let mut field_names: Vec<String> = vec![];
let field_list = node.field_list()?;
@@ -295,7 +309,7 @@ fn extract_and_parse_record_fields(
.fields()
.filter_map(|record_field| {
if selection_range.contains_range(record_field.syntax().text_range()) {
- let record_field_info = parse_record_field(record_field, mutable)?;
+ let record_field_info = parse_record_field(record_field, assist_type)?;
field_names.push(record_field_info.fn_name.clone());
return Some(record_field_info);
}
@@ -316,12 +330,15 @@ fn extract_and_parse_record_fields(
}
}
-fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<RecordFieldInfo> {
+fn parse_record_field(
+ record_field: ast::RecordField,
+ assist_type: &AssistType,
+) -> Option<RecordFieldInfo> {
let field_name = record_field.name()?;
let field_ty = record_field.ty()?;
let mut fn_name = to_lower_snake_case(&field_name.to_string());
- if mutable {
+ if matches!(assist_type, AssistType::MutGet) {
format_to!(fn_name, "_mut");
}
@@ -330,8 +347,89 @@ fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<R
Some(RecordFieldInfo { field_name, field_ty, fn_name, target })
}
+fn build_source_change(
+ builder: &mut SourceChangeBuilder,
+ ctx: &AssistContext<'_>,
+ info_of_record_fields: Vec<RecordFieldInfo>,
+ assist_info: AssistInfo,
+) {
+ let record_fields_count = info_of_record_fields.len();
+
+ let mut buf = String::with_capacity(512);
+
+ // Check if an impl exists
+ if let Some(impl_def) = &assist_info.impl_def {
+ // Check if impl is empty
+ if let Some(assoc_item_list) = impl_def.assoc_item_list() {
+ if assoc_item_list.assoc_items().next().is_some() {
+ // If not empty then only insert a new line
+ buf.push('\n');
+ }
+ }
+ }
+
+ for (i, record_field_info) in info_of_record_fields.iter().enumerate() {
+ // This buf inserts a newline at the end of a getter
+ // automatically. If one wants to add one more newline
+ // to separate it from other assoc items, that needs
+ // to be handled separately.
+ let mut getter_buf = match assist_info.assist_type {
+ AssistType::Set => generate_setter_from_info(&assist_info, record_field_info),
+ _ => generate_getter_from_info(ctx, &assist_info, record_field_info),
+ };
+
+ // Insert `$0` only for last getter we generate
+ if i == record_fields_count - 1 {
+ if ctx.config.snippet_cap.is_some() {
+ getter_buf = getter_buf.replacen("fn ", "fn $0", 1);
+ }
+ }
+
+ // For the first element we do not prepend '\n', as
+ // that may already have been inserted by the impl_def check
+ // above. The other cases are:
+ //
+ // - the impl exists but is empty: here we would ideally
+ // not want a newline between the `impl <struct> {`
+ // and `fn <fn-name>() {` lines
+ //
+ // - the impl itself does not exist: in this case we
+ // generate a new impl ourselves, and the same
+ // reasoning as above applies for not keeping
+ // the newline
+ if i == 0 {
+ buf = buf + &getter_buf;
+ } else {
+ buf = buf + "\n" + &getter_buf;
+ }
+
+ // We don't insert a newline at the end of the
+ // last getter, as it would end up at the end of
+ // the impl, where we don't want an extra blank
+ // line between the getter and the closing `}` of
+ // the impl for no reason.
+ if i < record_fields_count - 1 {
+ buf = buf + "\n";
+ }
+ }
+
+ let start_offset = assist_info
+ .impl_def
+ .as_ref()
+ .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(&ast::Adt::Struct(assist_info.strukt.clone()), &buf);
+ assist_info.strukt.syntax().text_range().end()
+ });
+
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, start_offset, buf),
+ None => builder.insert(start_offset, buf),
+ }
+}
+
#[cfg(test)]
-mod tests {
+mod tests_getter {
use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable};
use super::*;
@@ -812,3 +910,105 @@ impl Context {
);
}
}
+
+#[cfg(test)]
+mod tests_setter {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ fn check_not_applicable(ra_fixture: &str) {
+ check_assist_not_applicable(generate_setter, ra_fixture)
+ }
+
+ #[test]
+ fn test_generate_setter_from_field() {
+ check_assist(
+ generate_setter,
+ r#"
+struct Person<T: Clone> {
+ dat$0a: T,
+}"#,
+ r#"
+struct Person<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Person<T> {
+ fn $0set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_setter_already_implemented() {
+ check_not_applicable(
+ r#"
+struct Person<T: Clone> {
+ dat$0a: T,
+}
+
+impl<T: Clone> Person<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_generate_setter_from_field_with_visibility_marker() {
+ check_assist(
+ generate_setter,
+ r#"
+pub(crate) struct Person<T: Clone> {
+ dat$0a: T,
+}"#,
+ r#"
+pub(crate) struct Person<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Person<T> {
+ pub(crate) fn $0set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generate_setter() {
+ check_assist(
+ generate_setter,
+ r#"
+struct Context<T: Clone> {
+ data: T,
+ cou$0nt: usize,
+}
+
+impl<T: Clone> Context<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+}"#,
+ r#"
+struct Context<T: Clone> {
+ data: T,
+ count: usize,
+}
+
+impl<T: Clone> Context<T> {
+ fn set_data(&mut self, data: T) {
+ self.data = data;
+ }
+
+ fn $0set_count(&mut self, count: usize) {
+ self.count = count;
+ }
+}"#,
+ );
+ }
+}
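As a rough standalone sketch of the string template that `generate_setter_from_info` above feeds into `format_to!` (the helper function and the plain `format!` call are illustrative only, not the actual rust-analyzer code):

fn setter_snippet(vis: &str, fn_name: &str, field_ty: &str) -> String {
    // Mirrors the "fn set_<name>(&mut self, <name>: <ty>) { self.<name> = <name>; }"
    // shape produced by the assist.
    format!(
        "    {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{\n        self.{fn_name} = {fn_name};\n    }}"
    )
}

fn main() {
    // For a `pub(crate)` struct with a field `data: T`, this prints the same
    // setter the tests above expect (modulo the `$0` tabstop and impl block).
    println!("{}", setter_snippet("pub(crate) ", "data", "T"));
}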
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
deleted file mode 100644
index 62f72df1c..000000000
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs
+++ /dev/null
@@ -1,175 +0,0 @@
-use stdx::{format_to, to_lower_snake_case};
-use syntax::ast::{self, AstNode, HasName, HasVisibility};
-
-use crate::{
- utils::{find_impl_block_end, find_struct_impl, generate_impl_text},
- AssistContext, AssistId, AssistKind, Assists, GroupLabel,
-};
-
-// Assist: generate_setter
-//
-// Generate a setter method.
-//
-// ```
-// struct Person {
-// nam$0e: String,
-// }
-// ```
-// ->
-// ```
-// struct Person {
-// name: String,
-// }
-//
-// impl Person {
-// fn set_name(&mut self, name: String) {
-// self.name = name;
-// }
-// }
-// ```
-pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
- let field = ctx.find_node_at_offset::<ast::RecordField>()?;
-
- let field_name = field.name()?;
- let field_ty = field.ty()?;
-
- // Return early if we've found an existing fn
- let fn_name = to_lower_snake_case(&field_name.to_string());
- let impl_def =
- find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[format!("set_{fn_name}")])?;
-
- let target = field.syntax().text_range();
- acc.add_group(
- &GroupLabel("Generate getter/setter".to_owned()),
- AssistId("generate_setter", AssistKind::Generate),
- "Generate a setter method",
- target,
- |builder| {
- let mut buf = String::with_capacity(512);
-
- if impl_def.is_some() {
- buf.push('\n');
- }
-
- let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} "));
- format_to!(
- buf,
- " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{
- self.{fn_name} = {fn_name};
- }}"
- );
-
- let start_offset = impl_def
- .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
- .unwrap_or_else(|| {
- buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf);
- strukt.syntax().text_range().end()
- });
-
- builder.insert(start_offset, buf);
- },
- )
-}
-
-#[cfg(test)]
-mod tests {
- use crate::tests::{check_assist, check_assist_not_applicable};
-
- use super::*;
-
- fn check_not_applicable(ra_fixture: &str) {
- check_assist_not_applicable(generate_setter, ra_fixture)
- }
-
- #[test]
- fn test_generate_setter_from_field() {
- check_assist(
- generate_setter,
- r#"
-struct Person<T: Clone> {
- dat$0a: T,
-}"#,
- r#"
-struct Person<T: Clone> {
- data: T,
-}
-
-impl<T: Clone> Person<T> {
- fn set_data(&mut self, data: T) {
- self.data = data;
- }
-}"#,
- );
- }
-
- #[test]
- fn test_generate_setter_already_implemented() {
- check_not_applicable(
- r#"
-struct Person<T: Clone> {
- dat$0a: T,
-}
-
-impl<T: Clone> Person<T> {
- fn set_data(&mut self, data: T) {
- self.data = data;
- }
-}"#,
- );
- }
-
- #[test]
- fn test_generate_setter_from_field_with_visibility_marker() {
- check_assist(
- generate_setter,
- r#"
-pub(crate) struct Person<T: Clone> {
- dat$0a: T,
-}"#,
- r#"
-pub(crate) struct Person<T: Clone> {
- data: T,
-}
-
-impl<T: Clone> Person<T> {
- pub(crate) fn set_data(&mut self, data: T) {
- self.data = data;
- }
-}"#,
- );
- }
-
- #[test]
- fn test_multiple_generate_setter() {
- check_assist(
- generate_setter,
- r#"
-struct Context<T: Clone> {
- data: T,
- cou$0nt: usize,
-}
-
-impl<T: Clone> Context<T> {
- fn set_data(&mut self, data: T) {
- self.data = data;
- }
-}"#,
- r#"
-struct Context<T: Clone> {
- data: T,
- count: usize,
-}
-
-impl<T: Clone> Context<T> {
- fn set_data(&mut self, data: T) {
- self.data = data;
- }
-
- fn set_count(&mut self, count: usize) {
- self.count = count;
- }
-}"#,
- );
- }
-}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
new file mode 100644
index 000000000..0f67380d1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
@@ -0,0 +1,429 @@
+use crate::assist_context::{AssistContext, Assists};
+use ide_db::assists::AssistId;
+use syntax::{
+ ast::{self, edit::IndentLevel, make, HasGenericParams, HasVisibility},
+ ted, AstNode, SyntaxKind,
+};
+
+// NOTES:
+// We generate erroneous code if a function is declared const (E0379).
+// This is left to the user to correct, as our only other option is to remove
+// the function completely, which we should not be doing.
+
+// Assist: generate_trait_from_impl
+//
+// Generate trait for an already defined inherent impl and convert impl to a trait impl.
+//
+// ```
+// struct Foo<const N: usize>([i32; N]);
+//
+// macro_rules! const_maker {
+// ($t:ty, $v:tt) => {
+// const CONST: $t = $v;
+// };
+// }
+//
+// impl<const N: usize> Fo$0o<N> {
+// // Used as an associated constant.
+// const CONST_ASSOC: usize = N * 4;
+//
+// fn create() -> Option<()> {
+// Some(())
+// }
+//
+// const_maker! {i32, 7}
+// }
+// ```
+// ->
+// ```
+// struct Foo<const N: usize>([i32; N]);
+//
+// macro_rules! const_maker {
+// ($t:ty, $v:tt) => {
+// const CONST: $t = $v;
+// };
+// }
+//
+// trait ${0:TraitName}<const N: usize> {
+// // Used as an associated constant.
+// const CONST_ASSOC: usize = N * 4;
+//
+// fn create() -> Option<()>;
+//
+// const_maker! {i32, 7}
+// }
+//
+// impl<const N: usize> ${0:TraitName}<N> for Foo<N> {
+// // Used as an associated constant.
+// const CONST_ASSOC: usize = N * 4;
+//
+// fn create() -> Option<()> {
+// Some(())
+// }
+//
+// const_maker! {i32, 7}
+// }
+// ```
+pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // Get AST Node
+ let impl_ast = ctx.find_node_at_offset::<ast::Impl>()?;
+
+ // Check if the cursor is to the left of the assoc item list's L_CURLY.
+ // If there is no L_CURLY, then return.
+ let l_curly = impl_ast.assoc_item_list()?.l_curly_token()?;
+
+ let cursor_offset = ctx.offset();
+ let l_curly_offset = l_curly.text_range();
+ if cursor_offset >= l_curly_offset.start() {
+ return None;
+ }
+
+ // If impl is not inherent then we don't really need to go any further.
+ if impl_ast.for_token().is_some() {
+ return None;
+ }
+
+ let assoc_items = impl_ast.assoc_item_list()?;
+ let first_element = assoc_items.assoc_items().next();
+ if first_element.is_none() {
+ // No reason for an assist.
+ return None;
+ }
+
+ let impl_name = impl_ast.self_ty()?;
+
+ acc.add(
+ AssistId("generate_trait_from_impl", ide_db::assists::AssistKind::Generate),
+ "Generate trait from impl",
+ impl_ast.syntax().text_range(),
+ |builder| {
+ let trait_items = assoc_items.clone_for_update();
+ let impl_items = assoc_items.clone_for_update();
+
+ trait_items.assoc_items().for_each(|item| {
+ strip_body(&item);
+ remove_items_visibility(&item);
+ });
+
+ impl_items.assoc_items().for_each(|item| {
+ remove_items_visibility(&item);
+ });
+
+ let trait_ast = make::trait_(
+ false,
+ "NewTrait",
+ impl_ast.generic_param_list(),
+ impl_ast.where_clause(),
+ trait_items,
+ );
+
+ // Change `impl Foo` to `impl NewTrait for Foo`
+ let arg_list = if let Some(genpars) = impl_ast.generic_param_list() {
+ genpars.to_generic_args().to_string()
+ } else {
+ "".to_string()
+ };
+
+ if let Some(snippet_cap) = ctx.config.snippet_cap {
+ builder.replace_snippet(
+ snippet_cap,
+ impl_name.syntax().text_range(),
+ format!("${{0:TraitName}}{} for {}", arg_list, impl_name.to_string()),
+ );
+
+ // Insert trait before TraitImpl
+ builder.insert_snippet(
+ snippet_cap,
+ impl_ast.syntax().text_range().start(),
+ format!(
+ "{}\n\n{}",
+ trait_ast.to_string().replace("NewTrait", "${0:TraitName}"),
+ IndentLevel::from_node(impl_ast.syntax())
+ ),
+ );
+ } else {
+ builder.replace(
+ impl_name.syntax().text_range(),
+ format!("NewTrait{} for {}", arg_list, impl_name.to_string()),
+ );
+
+ // Insert trait before TraitImpl
+ builder.insert(
+ impl_ast.syntax().text_range().start(),
+ format!(
+ "{}\n\n{}",
+ trait_ast.to_string(),
+ IndentLevel::from_node(impl_ast.syntax())
+ ),
+ );
+ }
+
+ builder.replace(assoc_items.syntax().text_range(), impl_items.to_string());
+ },
+ );
+
+ Some(())
+}
+
+/// `E0449` Trait items always share the visibility of their trait
+fn remove_items_visibility(item: &ast::AssocItem) {
+ match item {
+ ast::AssocItem::Const(c) => {
+ if let Some(vis) = c.visibility() {
+ ted::remove(vis.syntax());
+ }
+ }
+ ast::AssocItem::Fn(f) => {
+ if let Some(vis) = f.visibility() {
+ ted::remove(vis.syntax());
+ }
+ }
+ ast::AssocItem::TypeAlias(t) => {
+ if let Some(vis) = t.visibility() {
+ ted::remove(vis.syntax());
+ }
+ }
+ _ => (),
+ }
+}
+
+fn strip_body(item: &ast::AssocItem) {
+ match item {
+ ast::AssocItem::Fn(f) => {
+ if let Some(body) = f.body() {
+ // In contrast to function bodies, we don't want any whitespace before the semicolon,
+ // so let's remove it if we see any.
+ if let Some(prev) = body.syntax().prev_sibling_or_token() {
+ if prev.kind() == SyntaxKind::WHITESPACE {
+ ted::remove(prev);
+ }
+ }
+
+ ted::replace(body.syntax(), make::tokens::semicolon());
+ }
+ }
+ _ => (),
+ };
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable};
+
+ #[test]
+ fn test_trigger_when_cursor_on_header() {
+ check_assist_not_applicable(
+ generate_trait_from_impl,
+ r#"
+struct Foo(f64);
+
+impl Foo { $0
+ fn add(&mut self, x: f64) {
+ self.0 += x;
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_item_fn() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo(f64);
+
+impl F$0oo {
+ fn add(&mut self, x: f64) {
+ self.0 += x;
+ }
+}"#,
+ r#"
+struct Foo(f64);
+
+trait NewTrait {
+ fn add(&mut self, x: f64);
+}
+
+impl NewTrait for Foo {
+ fn add(&mut self, x: f64) {
+ self.0 += x;
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_item_macro() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo;
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+impl F$0oo {
+ const_maker! {i32, 7}
+}"#,
+ r#"
+struct Foo;
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+trait NewTrait {
+ const_maker! {i32, 7}
+}
+
+impl NewTrait for Foo {
+ const_maker! {i32, 7}
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_item_const() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo;
+
+impl F$0oo {
+ const ABC: i32 = 3;
+}"#,
+ r#"
+struct Foo;
+
+trait NewTrait {
+ const ABC: i32 = 3;
+}
+
+impl NewTrait for Foo {
+ const ABC: i32 = 3;
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_impl_with_generics() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo<const N: usize>([i32; N]);
+
+impl<const N: usize> F$0oo<N> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+ "#,
+ r#"
+struct Foo<const N: usize>([i32; N]);
+
+trait NewTrait<const N: usize> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+
+impl<const N: usize> NewTrait<N> for Foo<N> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_trait_items_should_not_have_vis() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+struct Foo;
+
+impl F$0oo {
+ pub fn a_func() -> Option<()> {
+ Some(())
+ }
+}"#,
+ r#"
+struct Foo;
+
+trait NewTrait {
+ fn a_func() -> Option<()>;
+}
+
+impl NewTrait for Foo {
+ fn a_func() -> Option<()> {
+ Some(())
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_empty_inherent_impl() {
+ check_assist_not_applicable(
+ generate_trait_from_impl,
+ r#"
+impl Emp$0tyImpl{}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_not_top_level_impl() {
+ check_assist_no_snippet_cap(
+ generate_trait_from_impl,
+ r#"
+mod a {
+ impl S$0 {
+ fn foo() {}
+ }
+}"#,
+ r#"
+mod a {
+ trait NewTrait {
+ fn foo();
+ }
+
+ impl NewTrait for S {
+ fn foo() {}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_snippet_cap_is_some() {
+ check_assist(
+ generate_trait_from_impl,
+ r#"
+struct Foo<const N: usize>([i32; N]);
+
+impl<const N: usize> F$0oo<N> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+ "#,
+ r#"
+struct Foo<const N: usize>([i32; N]);
+
+trait ${0:TraitName}<const N: usize> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+
+impl<const N: usize> ${0:TraitName}<N> for Foo<N> {
+ // Used as an associated constant.
+ const CONST: usize = N * 4;
+}
+ "#,
+ )
+ }
+}
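For reference, the E0379 situation mentioned in the NOTES at the top of this new file looks like the snippet below; it intentionally does not compile, which is why the assist leaves any `const fn` for the user to fix rather than silently dropping it:

trait NewTrait {
    // Rejected by rustc with error E0379: trait functions cannot be declared const
    const fn create() -> Option<()>;
}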
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
index 797180fa1..ffab58509 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
@@ -15,7 +15,7 @@ use ide_db::{
};
use itertools::{izip, Itertools};
use syntax::{
- ast::{self, edit_in_place::Indent, HasArgList, PathExpr},
+ ast::{self, edit::IndentLevel, edit_in_place::Indent, HasArgList, PathExpr},
ted, AstNode, NodeOrToken, SyntaxKind,
};
@@ -80,7 +80,7 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let is_recursive_fn = usages
.clone()
- .in_scope(SearchScope::file_range(FileRange {
+ .in_scope(&SearchScope::file_range(FileRange {
file_id: def_file,
range: func_body.syntax().text_range(),
}))
@@ -306,7 +306,7 @@ fn inline(
params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
CallInfo { node, arguments, generic_arg_list }: &CallInfo,
) -> ast::Expr {
- let body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
+ let mut body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
cov_mark::hit!(inline_call_defined_in_macro);
if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) {
body
@@ -391,19 +391,19 @@ fn inline(
}
}
+ let mut let_stmts = Vec::new();
+
// Inline parameter expressions or generate `let` statements depending on whether inlining works or not.
- for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() {
+ for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments) {
// izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors
let usages: &[ast::PathExpr] = &usages;
let expr: &ast::Expr = expr;
- let insert_let_stmt = || {
+ let mut insert_let_stmt = || {
let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone());
- if let Some(stmt_list) = body.stmt_list() {
- stmt_list.push_front(
- make::let_stmt(pat.clone(), ty, Some(expr.clone())).clone_for_update().into(),
- )
- }
+ let_stmts.push(
+ make::let_stmt(pat.clone(), ty, Some(expr.clone())).clone_for_update().into(),
+ );
};
// check if there is a local var in the function that conflicts with parameter
@@ -457,6 +457,24 @@ fn inline(
}
}
+ let is_async_fn = function.is_async(sema.db);
+ if is_async_fn {
+ cov_mark::hit!(inline_call_async_fn);
+ body = make::async_move_block_expr(body.statements(), body.tail_expr()).clone_for_update();
+
+ // Arguments should be evaluated outside the async block, and then moved into it.
+ if !let_stmts.is_empty() {
+ cov_mark::hit!(inline_call_async_fn_with_let_stmts);
+ body.indent(IndentLevel(1));
+ body = make::block_expr(let_stmts, Some(body.into())).clone_for_update();
+ }
+ } else if let Some(stmt_list) = body.stmt_list() {
+ ted::insert_all(
+ ted::Position::after(stmt_list.l_curly_token().unwrap()),
+ let_stmts.into_iter().map(|stmt| stmt.syntax().clone().into()).collect(),
+ );
+ }
+
let original_indentation = match node {
ast::CallableExpr::Call(it) => it.indent_level(),
ast::CallableExpr::MethodCall(it) => it.indent_level(),
@@ -464,7 +482,7 @@ fn inline(
body.reindent_to(original_indentation);
match body.tail_expr() {
- Some(expr) if body.statements().next().is_none() => expr,
+ Some(expr) if !is_async_fn && body.statements().next().is_none() => expr,
_ => match node
.syntax()
.parent()
@@ -1353,4 +1371,107 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn async_fn_single_expression() {
+ cov_mark::check!(inline_call_async_fn);
+ check_assist(
+ inline_call,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(arg: u32) -> u32 {
+ bar(arg).await * 2
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ spawn(foo$0(42));
+}
+"#,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(arg: u32) -> u32 {
+ bar(arg).await * 2
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ spawn(async move {
+ bar(42).await * 2
+ });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn async_fn_multiple_statements() {
+ cov_mark::check!(inline_call_async_fn);
+ check_assist(
+ inline_call,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(arg: u32) -> u32 {
+ bar(arg).await;
+ 42
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ spawn(foo$0(42));
+}
+"#,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(arg: u32) -> u32 {
+ bar(arg).await;
+ 42
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ spawn(async move {
+ bar(42).await;
+ 42
+ });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn async_fn_with_let_statements() {
+ cov_mark::check!(inline_call_async_fn);
+ cov_mark::check!(inline_call_async_fn_with_let_stmts);
+ check_assist(
+ inline_call,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(x: u32, y: u32, z: &u32) -> u32 {
+ bar(x).await;
+ y + y + *z
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ let var = 42;
+ spawn(foo$0(var, var + 1, &var));
+}
+"#,
+ r#"
+async fn bar(x: u32) -> u32 { x + 1 }
+async fn foo(x: u32, y: u32, z: &u32) -> u32 {
+ bar(x).await;
+ y + y + *z
+}
+fn spawn<T>(_: T) {}
+fn main() {
+ let var = 42;
+ spawn({
+ let y = var + 1;
+ let z: &u32 = &var;
+ async move {
+ bar(var).await;
+ y + y + *z
+ }
+ });
+}
+"#,
+ );
+ }
}
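The "Arguments should be evaluated outside the async block" comment above is about evaluation order: futures are lazy, so hoisting each argument into a `let` keeps any side effects at the original call site instead of deferring them until the future is polled. A small hand-written illustration of the difference (the names here are invented for the example):

fn compute() -> u32 {
    println!("computed"); // side effect we want to keep at the call site
    2
}

async fn foo(x: u32) -> u32 {
    x * 2
}

fn main() {
    // At the call site `foo(compute())`, `compute()` runs immediately even
    // though the future has not been polled; the hoisted `let` preserves that.
    let _fut_inlined = {
        let x = compute();
        async move { x * 2 }
    };

    // A naive inlining like the block below would instead defer the
    // `println!` until the future is awaited.
    let _fut_naive = async move { compute() * 2 };
}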
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
index 5aa8e56f5..5d956b1a5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
@@ -37,11 +37,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
let expanded = insert_ws_into(ctx.sema.expand(&unexpanded)?.clone_for_update());
-
let text_range = unexpanded.syntax().text_range();
acc.add(
- AssistId("inline_macro", AssistKind::RefactorRewrite),
+ AssistId("inline_macro", AssistKind::RefactorInline),
format!("Inline macro"),
text_range,
|builder| builder.replace(text_range, expanded.to_string()),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
index b6027eac5..22d536b5a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
@@ -54,7 +54,11 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// NOTE: We can technically provide this assist for default methods in trait definitions, but
// it's somewhat complex to handle it correctly when the const's name conflicts with
// supertrait's item. We may want to consider implementing it in the future.
- let AssocItemContainer::Impl(impl_) = ctx.sema.to_def(&parent_fn)?.as_assoc_item(db)?.container(db) else { return None; };
+ let AssocItemContainer::Impl(impl_) =
+ ctx.sema.to_def(&parent_fn)?.as_assoc_item(db)?.container(db)
+ else {
+ return None;
+ };
if impl_.trait_(db).is_some() {
return None;
}
@@ -78,17 +82,19 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
return None;
}
- let usages =
- Definition::Const(def).usages(&ctx.sema).in_scope(SearchScope::file_range(FileRange {
- file_id: ctx.file_id(),
- range: parent_fn.syntax().text_range(),
- }));
-
acc.add(
AssistId("move_const_to_impl", crate::AssistKind::RefactorRewrite),
"Move const to impl block",
const_.syntax().text_range(),
|builder| {
+ let usages = Definition::Const(def)
+ .usages(&ctx.sema)
+ .in_scope(&SearchScope::file_range(FileRange {
+ file_id: ctx.file_id(),
+ range: parent_fn.syntax().text_range(),
+ }))
+ .all();
+
let range_to_delete = match const_.syntax().next_sibling_or_token() {
Some(s) if matches!(s.kind(), SyntaxKind::WHITESPACE) => {
// Remove following whitespaces too.
@@ -99,7 +105,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
builder.delete(range_to_delete);
let const_ref = format!("Self::{}", name.display(ctx.db()));
- for range in usages.all().file_ranges().map(|it| it.range) {
+ for range in usages.file_ranges().map(|it| it.range) {
builder.replace(range, const_ref.clone());
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 23153b4c5..5cc110cf1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -8,13 +8,10 @@ use ide_db::{
use stdx::to_upper_snake_case;
use syntax::{
ast::{self, make, HasName},
- AstNode, WalkEvent,
+ ted, AstNode, WalkEvent,
};
-use crate::{
- assist_context::{AssistContext, Assists},
- utils::{render_snippet, Cursor},
-};
+use crate::assist_context::{AssistContext, Assists};
// Assist: promote_local_to_const
//
@@ -70,29 +67,33 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>)
cov_mark::hit!(promote_local_non_const);
return None;
}
- let target = let_stmt.syntax().text_range();
+
acc.add(
AssistId("promote_local_to_const", AssistKind::Refactor),
"Promote local to constant",
- target,
- |builder| {
+ let_stmt.syntax().text_range(),
+ |edit| {
let name = to_upper_snake_case(&name.to_string());
let usages = Definition::Local(local).usages(&ctx.sema).all();
if let Some(usages) = usages.references.get(&ctx.file_id()) {
+ let name = make::name_ref(&name);
+
for usage in usages {
- builder.replace(usage.range, &name);
+ let Some(usage) = usage.name.as_name_ref().cloned() else { continue };
+ let usage = edit.make_mut(usage);
+ ted::replace(usage.syntax(), name.clone_for_update().syntax());
}
}
- let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer);
- match ctx.config.snippet_cap.zip(item.name()) {
- Some((cap, name)) => builder.replace_snippet(
- cap,
- target,
- render_snippet(cap, item.syntax(), Cursor::Before(name.syntax())),
- ),
- None => builder.replace(target, item.to_string()),
+ let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer)
+ .clone_for_update();
+ let let_stmt = edit.make_mut(let_stmt);
+
+ if let Some((cap, name)) = ctx.config.snippet_cap.zip(item.name()) {
+ edit.add_tabstop_before(cap, name);
}
+
+ ted::replace(let_stmt.syntax(), item.syntax());
},
)
}
@@ -158,6 +159,27 @@ fn foo() {
}
#[test]
+ fn multiple_uses() {
+ check_assist(
+ promote_local_to_const,
+ r"
+fn foo() {
+ let x$0 = 0;
+ let y = x;
+ let z = (x, x, x, x);
+}
+",
+ r"
+fn foo() {
+ const $0X: i32 = 0;
+ let y = X;
+ let z = (X, X, X, X);
+}
+",
+ );
+ }
+
+ #[test]
fn not_applicable_non_const_meth_call() {
cov_mark::check!(promote_local_non_const);
check_assist_not_applicable(
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
index a5c7fea40..f222b3eb9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs
@@ -102,7 +102,7 @@ struct AssignmentsCollector<'a> {
assignments: Vec<(ast::BinExpr, ast::Expr)>,
}
-impl<'a> AssignmentsCollector<'a> {
+impl AssignmentsCollector<'_> {
fn collect_match(&mut self, match_expr: &ast::MatchExpr) -> Option<()> {
for arm in match_expr.match_arm_list()?.arms() {
match arm.expr()? {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
new file mode 100644
index 000000000..dd4839351
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -0,0 +1,739 @@
+use std::collections::{hash_map::Entry, HashMap};
+
+use hir::{InFile, Module, ModuleSource};
+use ide_db::{
+ base_db::FileRange,
+ defs::Definition,
+ search::{FileReference, ReferenceCategory, SearchScope},
+ RootDatabase,
+};
+use syntax::{ast, AstNode};
+use text_edit::TextRange;
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: remove_unused_imports
+//
+// Removes any use statements in the current selection that are unused.
+//
+// ```
+// struct X();
+// mod foo {
+// use super::X$0;
+// }
+// ```
+// ->
+// ```
+// struct X();
+// mod foo {
+// }
+// ```
+pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ // First, grab the uses that intersect with the current selection.
+ let selected_el = match ctx.covering_element() {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ // This applies to all uses that are selected, or are ancestors of our selection.
+ let uses_up = selected_el.ancestors().skip(1).filter_map(ast::Use::cast);
+ let uses_down = selected_el
+ .descendants()
+ .filter(|x| x.text_range().intersect(ctx.selection_trimmed()).is_some())
+ .filter_map(ast::Use::cast);
+ let uses = uses_up.chain(uses_down).collect::<Vec<_>>();
+ // Maps each module to the search scopes we should look through for usages.
+ // Maps use nodes to the scope that we should search through to find
+ let mut search_scopes = HashMap::<Module, Vec<SearchScope>>::new();
+
+ // iterator over all unused use trees
+ let mut unused = uses
+ .into_iter()
+ .flat_map(|u| u.syntax().descendants().filter_map(ast::UseTree::cast))
+ .filter(|u| u.use_tree_list().is_none())
+ .filter_map(|u| {
+ // Find any use trees that are unused.
+
+ let use_module = ctx.sema.scope(&u.syntax()).map(|s| s.module())?;
+ let scope = match search_scopes.entry(use_module) {
+ Entry::Occupied(o) => o.into_mut(),
+ Entry::Vacant(v) => v.insert(module_search_scope(ctx.db(), use_module)),
+ };
+
+ // Gets the path associated with this use tree. If there isn't one, then ignore this use tree.
+ let path = if let Some(path) = u.path() {
+ path
+ } else if u.star_token().is_some() {
+ // This case maps to the situation where the * token is braced.
+ // In this case, the parent use tree's path is the one we should use to resolve the glob.
+ match u.syntax().ancestors().skip(1).find_map(ast::UseTree::cast) {
+ Some(parent_u) if parent_u.path().is_some() => parent_u.path().unwrap(),
+ _ => return None,
+ }
+ } else {
+ return None;
+ };
+
+ // Get the actual definition associated with this use item.
+ let res = match ctx.sema.resolve_path(&path) {
+ Some(x) => x,
+ None => {
+ return None;
+ }
+ };
+
+ let def = match res {
+ hir::PathResolution::Def(d) => Definition::from(d),
+ _ => return None,
+ };
+
+ if u.star_token().is_some() {
+ // Check if any of the children of this module are used
+ let def_mod = match def {
+ Definition::Module(module) => module,
+ _ => return None,
+ };
+
+ if !def_mod
+ .scope(ctx.db(), Some(use_module))
+ .iter()
+ .filter_map(|(_, x)| match x {
+ hir::ScopeDef::ModuleDef(d) => Some(Definition::from(*d)),
+ _ => None,
+ })
+ .any(|d| used_once_in_scope(ctx, d, scope))
+ {
+ return Some(u);
+ }
+ } else if let Definition::Trait(ref t) = def {
+ // Check whether the trait itself or any of its items is used.
+ if !std::iter::once(def)
+ .chain(t.items(ctx.db()).into_iter().map(Definition::from))
+ .any(|d| used_once_in_scope(ctx, d, scope))
+ {
+ return Some(u);
+ }
+ } else {
+ if !used_once_in_scope(ctx, def, &scope) {
+ return Some(u);
+ }
+ }
+
+ None
+ })
+ .peekable();
+
+ // Peek so we terminate early if an unused use is found. Only do the rest of the work if the user selects the assist.
+ if unused.peek().is_some() {
+ acc.add(
+ AssistId("remove_unused_imports", AssistKind::QuickFix),
+ "Remove all the unused imports",
+ selected_el.text_range(),
+ |builder| {
+ let unused: Vec<ast::UseTree> = unused.map(|x| builder.make_mut(x)).collect();
+ for node in unused {
+ node.remove_recursive();
+ }
+ },
+ )
+ } else {
+ None
+ }
+}
+
+fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<SearchScope>) -> bool {
+ let mut found = false;
+
+ for scope in scopes {
+ let mut search_non_import = |_, r: FileReference| {
+ // The import itself is a use; we must skip that.
+ if r.category != Some(ReferenceCategory::Import) {
+ found = true;
+ true
+ } else {
+ false
+ }
+ };
+ def.usages(&ctx.sema).in_scope(scope).search(&mut search_non_import);
+ if found {
+ break;
+ }
+ }
+
+ found
+}
+
+/// Build a search scope spanning the given module but none of its submodules.
+fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScope> {
+ let (file_id, range) = {
+ let InFile { file_id, value } = module.definition_source(db);
+ if let Some((file_id, call_source)) = file_id.original_call_node(db) {
+ (file_id, Some(call_source.text_range()))
+ } else {
+ (
+ file_id.original_file(db),
+ match value {
+ ModuleSource::SourceFile(_) => None,
+ ModuleSource::Module(it) => Some(it.syntax().text_range()),
+ ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()),
+ },
+ )
+ }
+ };
+
+ fn split_at_subrange(first: TextRange, second: TextRange) -> (TextRange, Option<TextRange>) {
+ let intersect = first.intersect(second);
+ if let Some(intersect) = intersect {
+ let start_range = TextRange::new(first.start(), intersect.start());
+
+ if intersect.end() < first.end() {
+ (start_range, Some(TextRange::new(intersect.end(), first.end())))
+ } else {
+ (start_range, None)
+ }
+ } else {
+ (first, None)
+ }
+ }
+
+ let mut scopes = Vec::new();
+ if let Some(range) = range {
+ let mut ranges = vec![range];
+
+ for child in module.children(db) {
+ let rng = match child.definition_source(db).value {
+ ModuleSource::SourceFile(_) => continue,
+ ModuleSource::Module(it) => it.syntax().text_range(),
+ ModuleSource::BlockExpr(_) => continue,
+ };
+ let mut new_ranges = Vec::new();
+ for old_range in ranges.iter_mut() {
+ let split = split_at_subrange(old_range.clone(), rng);
+ *old_range = split.0;
+ new_ranges.extend(split.1);
+ }
+
+ ranges.append(&mut new_ranges);
+ }
+
+ for range in ranges {
+ scopes.push(SearchScope::file_range(FileRange { file_id, range }));
+ }
+ } else {
+ scopes.push(SearchScope::single_file(file_id));
+ }
+
+ scopes
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn remove_unused() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ $0use super::X;
+ use super::Y;$0
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_is_precise() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod z {
+$0use super::X;$0
+
+fn w() {
+ struct X();
+ let x = X();
+}
+}
+"#,
+ r#"
+struct X();
+mod z {
+
+fn w() {
+ struct X();
+ let x = X();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_name_use_is_use() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+trait Y {
+ fn f();
+}
+
+impl Y for X {
+ fn f() {}
+}
+mod z {
+$0use super::X;
+use super::Y;$0
+
+fn w() {
+ X::f();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn trait_item_use_is_use() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+trait Y {
+ fn f(self);
+}
+
+impl Y for X {
+ fn f(self) {}
+}
+mod z {
+$0use super::X;
+use super::Y;$0
+
+fn w() {
+ let x = X();
+ x.f();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn renamed_trait_item_use_is_use() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+trait Y {
+ fn f(self);
+}
+
+impl Y for X {
+ fn f(self) {}
+}
+mod z {
+$0use super::X;
+use super::Y as Z;$0
+
+fn w() {
+ let x = X();
+ x.f();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn renamed_underscore_trait_item_use_is_use() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+trait Y {
+ fn f(self);
+}
+
+impl Y for X {
+ fn f(self) {}
+}
+mod z {
+$0use super::X;
+use super::Y as _;$0
+
+fn w() {
+ let x = X();
+ x.f();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn dont_remove_used() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+$0use super::X;
+use super::Y;$0
+
+fn w() {
+ let x = X();
+ let y = Y();
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_in_braces() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ $0use super::{X, Y};$0
+
+ fn w() {
+ let x = X();
+ }
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::{X};
+
+ fn w() {
+ let x = X();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_under_cursor() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod z {
+ use super::X$0;
+}
+"#,
+ r#"
+struct X();
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_multi_use_block() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+$0mod y {
+ use super::X;
+}
+mod z {
+ use super::X;
+}$0
+"#,
+ r#"
+struct X();
+mod y {
+}
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_nested() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{X, y::Y}$0;
+ fn f() {
+ let x = X();
+ }
+ }
+}
+"#,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{X};
+ fn f() {
+ let x = X();
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_nested_first_item() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{X, y::Y}$0;
+ fn f() {
+ let y = Y();
+ }
+ }
+}
+"#,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{y::Y};
+ fn f() {
+ let y = Y();
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_nested_all_unused() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ use crate::{X, y::Y}$0;
+ }
+}
+"#,
+ r#"
+struct X();
+mod y {
+ struct Y();
+ mod z {
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_glob() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::*$0;
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn remove_unused_braced_glob() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::{*}$0;
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn dont_remove_used_glob() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::*$0;
+
+ fn f() {
+ let x = X();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn only_remove_from_selection() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+struct Y();
+mod z {
+ $0use super::X;$0
+ use super::Y;
+}
+mod w {
+ use super::Y;
+}
+"#,
+ r#"
+struct X();
+struct Y();
+mod z {
+ use super::Y;
+}
+mod w {
+ use super::Y;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_several_files() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+//- /foo.rs
+pub struct X();
+pub struct Y();
+
+//- /main.rs
+$0use foo::X;
+use foo::Y;
+$0
+mod foo;
+mod z {
+ use crate::foo::X;
+}
+"#,
+ r#"
+
+mod foo;
+mod z {
+ use crate::foo::X;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn use_in_submodule_doesnt_count() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+struct X();
+mod z {
+ use super::X$0;
+
+ mod w {
+ use crate::X;
+
+ fn f() {
+ let x = X();
+ }
+ }
+}
+"#,
+ r#"
+struct X();
+mod z {
+
+ mod w {
+ use crate::X;
+
+ fn f() {
+ let x = X();
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn use_in_submodule_file_doesnt_count() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+//- /z/foo.rs
+use crate::X;
+fn f() {
+ let x = X();
+}
+
+//- /main.rs
+pub struct X();
+
+mod z {
+ use crate::X$0;
+ mod foo;
+}
+"#,
+ r#"
+pub struct X();
+
+mod z {
+ mod foo;
+}
+"#,
+ );
+ }
+}
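To make the `split_at_subrange` logic above concrete: when a module spans offsets 0..100 and one of its child modules spans 20..30, the outer range is split into 0..20 and 30..100, so uses inside the submodule do not keep an import in the outer module alive. A rough re-implementation using plain `(start, end)` pairs instead of `TextRange` (illustrative only, not the rust-analyzer API):

fn split_at(first: (u32, u32), second: (u32, u32)) -> ((u32, u32), Option<(u32, u32)>) {
    let start = first.0.max(second.0);
    let end = first.1.min(second.1);
    if start <= end {
        // Keep the part of `first` before the overlap, plus whatever remains after it.
        let head = (first.0, start);
        if end < first.1 {
            (head, Some((end, first.1)))
        } else {
            (head, None)
        }
    } else {
        // No overlap: the original range is untouched.
        (first, None)
    }
}

fn main() {
    assert_eq!(split_at((0, 100), (20, 30)), ((0, 20), Some((30, 100))));
    assert_eq!(split_at((0, 100), (90, 200)), ((0, 90), None));
}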
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 3bdd795be..ac45581b7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -10,7 +10,7 @@ use crate::{
assist_context::{AssistContext, Assists, SourceChangeBuilder},
utils::{
add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body,
- generate_trait_impl_text, render_snippet, Cursor, DefaultMethods,
+ generate_trait_impl_text, render_snippet, Cursor, DefaultMethods, IgnoreAssocItems,
},
AssistId, AssistKind,
};
@@ -73,7 +73,7 @@ pub(crate) fn replace_derive_with_manual_impl(
&ctx.sema,
current_crate,
NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
- items_locator::AssocItemSearch::Exclude,
+ items_locator::AssocSearchMode::Exclude,
Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
.filter_map(|item| match item.as_module_def()? {
@@ -172,7 +172,17 @@ fn impl_def_from_trait(
) -> Option<(ast::Impl, ast::AssocItem)> {
let trait_ = trait_?;
let target_scope = sema.scope(annotated_name.syntax())?;
- let trait_items = filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No);
+
+ // Keep assoc items of local crates even if they have #[doc(hidden)] attr.
+ let ignore_items = if trait_.module(sema.db).krate().origin(sema.db).is_local() {
+ IgnoreAssocItems::No
+ } else {
+ IgnoreAssocItems::DocHiddenAttrPresent
+ };
+
+ let trait_items =
+ filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No, ignore_items);
+
if trait_items.is_empty() {
return None;
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
index e7b62d49b..c7c0be4c7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_named_generic_with_impl.rs
@@ -157,7 +157,7 @@ fn find_usages(
file_id: FileId,
) -> UsageSearchResult {
let file_range = FileRange { file_id, range: fn_.syntax().text_range() };
- type_param_def.usages(sema).in_scope(SearchScope::file_range(file_range)).all()
+ type_param_def.usages(sema).in_scope(&SearchScope::file_range(file_range)).all()
}
fn check_valid_usages(usages: &UsageSearchResult, param_list_range: TextRange) -> bool {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
index 26f3c1926..f235b554e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
@@ -38,14 +38,18 @@ pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext<'
};
let type_ref = &ret_type.ty()?;
- let Some(hir::Adt::Enum(ret_enum)) = ctx.sema.resolve_type(type_ref)?.as_adt() else { return None; };
+ let Some(hir::Adt::Enum(ret_enum)) = ctx.sema.resolve_type(type_ref)?.as_adt() else {
+ return None;
+ };
let result_enum =
FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;
if ret_enum != result_enum {
return None;
}
- let Some(ok_type) = unwrap_result_type(type_ref) else { return None; };
+ let Some(ok_type) = unwrap_result_type(type_ref) else {
+ return None;
+ };
acc.add(
AssistId("unwrap_result_return_type", AssistKind::RefactorRewrite),
@@ -130,12 +134,16 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
// Tries to extract `T` from `Result<T, E>`.
fn unwrap_result_type(ty: &ast::Type) -> Option<ast::Type> {
- let ast::Type::PathType(path_ty) = ty else { return None; };
+ let ast::Type::PathType(path_ty) = ty else {
+ return None;
+ };
let path = path_ty.path()?;
let segment = path.first_segment()?;
let generic_arg_list = segment.generic_arg_list()?;
let generic_args: Vec<_> = generic_arg_list.generic_args().collect();
- let ast::GenericArg::TypeArg(ok_type) = generic_args.first()? else { return None; };
+ let ast::GenericArg::TypeArg(ok_type) = generic_args.first()? else {
+ return None;
+ };
ok_type.ty()
}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
index b6c489eb6..24c338745 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs
@@ -6,7 +6,7 @@ use ide_db::{
};
use syntax::{
ast::{self, make, Expr},
- match_ast, AstNode,
+ match_ast, ted, AstNode,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -52,8 +52,8 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
AssistId("wrap_return_type_in_result", AssistKind::RefactorRewrite),
"Wrap return type in Result",
type_ref.syntax().text_range(),
- |builder| {
- let body = ast::Expr::BlockExpr(body);
+ |edit| {
+ let body = edit.make_mut(ast::Expr::BlockExpr(body));
let mut exprs_to_wrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
@@ -70,17 +70,24 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
let ok_wrapped = make::expr_call(
make::expr_path(make::ext::ident_path("Ok")),
make::arg_list(iter::once(ret_expr_arg.clone())),
- );
- builder.replace_ast(ret_expr_arg, ok_wrapped);
+ )
+ .clone_for_update();
+ ted::replace(ret_expr_arg.syntax(), ok_wrapped.syntax());
}
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snippet = format!("Result<{type_ref}, ${{0:_}}>");
- builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet)
- }
- None => builder
- .replace(type_ref.syntax().text_range(), format!("Result<{type_ref}, _>")),
+ let new_result_ty =
+ make::ext::ty_result(type_ref.clone(), make::ty_placeholder()).clone_for_update();
+ let old_result_ty = edit.make_mut(type_ref.clone());
+
+ ted::replace(old_result_ty.syntax(), new_result_ty.syntax());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ let generic_args = new_result_ty
+ .syntax()
+ .descendants()
+ .find_map(ast::GenericArgList::cast)
+ .unwrap();
+ edit.add_placeholder_snippet(cap, generic_args.generic_args().last().unwrap());
}
},
)
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index 111753bf3..2ebb5ef9b 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -145,6 +145,7 @@ mod handlers {
mod generate_constant;
mod generate_default_from_enum_variant;
mod generate_default_from_new;
+ mod generate_delegate_trait;
mod generate_deref;
mod generate_derive;
mod generate_documentation_template;
@@ -153,12 +154,12 @@ mod handlers {
mod generate_enum_variant;
mod generate_from_impl_for_enum;
mod generate_function;
- mod generate_getter;
+ mod generate_getter_or_setter;
mod generate_impl;
mod generate_is_empty_from_len;
mod generate_new;
- mod generate_setter;
mod generate_delegate_methods;
+ mod generate_trait_from_impl;
mod add_return_type;
mod inline_call;
mod inline_const_as_literal;
@@ -183,6 +184,7 @@ mod handlers {
mod raw_string;
mod remove_dbg;
mod remove_mut;
+ mod remove_unused_imports;
mod remove_unused_param;
mod remove_parentheses;
mod reorder_fields;
@@ -251,6 +253,7 @@ mod handlers {
generate_constant::generate_constant,
generate_default_from_enum_variant::generate_default_from_enum_variant,
generate_default_from_new::generate_default_from_new,
+ generate_delegate_trait::generate_delegate_trait,
generate_derive::generate_derive,
generate_documentation_template::generate_documentation_template,
generate_documentation_template::generate_doc_example,
@@ -264,6 +267,7 @@ mod handlers {
generate_impl::generate_trait_impl,
generate_is_empty_from_len::generate_is_empty_from_len,
generate_new::generate_new,
+ generate_trait_from_impl::generate_trait_from_impl,
inline_call::inline_call,
inline_call::inline_into_callers,
inline_const_as_literal::inline_const_as_literal,
@@ -291,6 +295,7 @@ mod handlers {
raw_string::make_usual_string,
raw_string::remove_hash,
remove_mut::remove_mut,
+ remove_unused_imports::remove_unused_imports,
remove_unused_param::remove_unused_param,
remove_parentheses::remove_parentheses,
reorder_fields::reorder_fields,
@@ -334,9 +339,9 @@ mod handlers {
extract_function::extract_function,
extract_module::extract_module,
//
- generate_getter::generate_getter,
- generate_getter::generate_getter_mut,
- generate_setter::generate_setter,
+ generate_getter_or_setter::generate_getter,
+ generate_getter_or_setter::generate_getter_mut,
+ generate_getter_or_setter::generate_setter,
generate_delegate_methods::generate_delegate_methods,
generate_deref::generate_deref,
//
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index 344f2bfcc..cc3e251a8 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -132,8 +132,13 @@ fn check_doc_test(assist_id: &str, before: &str, after: &str) {
.filter(|it| !it.source_file_edits.is_empty() || !it.file_system_edits.is_empty())
.expect("Assist did not contain any source changes");
let mut actual = before;
- if let Some(source_file_edit) = source_change.get_source_edit(file_id) {
+ if let Some((source_file_edit, snippet_edit)) =
+ source_change.get_source_and_snippet_edit(file_id)
+ {
source_file_edit.apply(&mut actual);
+ if let Some(snippet_edit) = snippet_edit {
+ snippet_edit.apply(&mut actual);
+ }
}
actual
};
@@ -191,9 +196,12 @@ fn check_with_config(
&& source_change.file_system_edits.len() == 0;
let mut buf = String::new();
- for (file_id, edit) in source_change.source_file_edits {
+ for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
let mut text = db.file_text(file_id).as_ref().to_owned();
edit.apply(&mut text);
+ if let Some(snippet_edit) = snippet_edit {
+ snippet_edit.apply(&mut text);
+ }
if !skip_header {
let sr = db.file_source_root(file_id);
let sr = db.source_root(sr);
@@ -485,18 +493,21 @@ pub fn test_some_range(a: int) -> bool {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "let $0var_name = 5;\n ",
- delete: 45..45,
- },
- Indel {
- insert: "var_name",
- delete: 59..60,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "let $0var_name = 5;\n ",
+ delete: 45..45,
+ },
+ Indel {
+ insert: "var_name",
+ delete: 59..60,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: true,
@@ -544,18 +555,21 @@ pub fn test_some_range(a: int) -> bool {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "let $0var_name = 5;\n ",
- delete: 45..45,
- },
- Indel {
- insert: "var_name",
- delete: 59..60,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "let $0var_name = 5;\n ",
+ delete: 45..45,
+ },
+ Indel {
+ insert: "var_name",
+ delete: 59..60,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: true,
@@ -581,18 +595,21 @@ pub fn test_some_range(a: int) -> bool {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "fun_name()",
- delete: 59..60,
- },
- Indel {
- insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
- delete: 110..110,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "fun_name()",
+ delete: 59..60,
+ },
+ Indel {
+ insert: "\n\nfn $0fun_name() -> i32 {\n 5\n}",
+ delete: 110..110,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: true,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index c097e0739..6eadc3dbc 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -952,6 +952,7 @@ fn doctest_generate_default_from_new() {
check_doc_test(
"generate_default_from_new",
r#####"
+//- minicore: default
struct Example { _inner: () }
impl Example {
@@ -1016,6 +1017,69 @@ impl Person {
}
#[test]
+fn doctest_generate_delegate_trait() {
+ check_doc_test(
+ "generate_delegate_trait",
+ r#####"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a$0: A,
+}
+"#####,
+ r#####"
+trait SomeTrait {
+ type T;
+ fn fn_(arg: u32) -> u32;
+ fn method_(&mut self) -> bool;
+}
+struct A;
+impl SomeTrait for A {
+ type T = u32;
+
+ fn fn_(arg: u32) -> u32 {
+ 42
+ }
+
+ fn method_(&mut self) -> bool {
+ false
+ }
+}
+struct B {
+ a: A,
+}
+
+impl SomeTrait for B {
+ type T = <A as SomeTrait>::T;
+
+ fn fn_(arg: u32) -> u32 {
+ <A as SomeTrait>::fn_(arg)
+ }
+
+ fn method_(&mut self) -> bool {
+ <A as SomeTrait>::method_( &mut self.a )
+ }
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_deref() {
check_doc_test(
"generate_deref",
@@ -1429,7 +1493,7 @@ struct Person {
}
impl Person {
- fn set_name(&mut self, name: String) {
+ fn $0set_name(&mut self, name: String) {
self.name = name;
}
}
@@ -1438,6 +1502,62 @@ impl Person {
}
#[test]
+fn doctest_generate_trait_from_impl() {
+ check_doc_test(
+ "generate_trait_from_impl",
+ r#####"
+struct Foo<const N: usize>([i32; N]);
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+impl<const N: usize> Fo$0o<N> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()> {
+ Some(())
+ }
+
+ const_maker! {i32, 7}
+}
+"#####,
+ r#####"
+struct Foo<const N: usize>([i32; N]);
+
+macro_rules! const_maker {
+ ($t:ty, $v:tt) => {
+ const CONST: $t = $v;
+ };
+}
+
+trait ${0:TraitName}<const N: usize> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()>;
+
+ const_maker! {i32, 7}
+}
+
+impl<const N: usize> ${0:TraitName}<N> for Foo<N> {
+ // Used as an associated constant.
+ const CONST_ASSOC: usize = N * 4;
+
+ fn create() -> Option<()> {
+ Some(())
+ }
+
+ const_maker! {i32, 7}
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_generate_trait_impl() {
check_doc_test(
"generate_trait_impl",
@@ -2115,6 +2235,24 @@ fn main() {
}
#[test]
+fn doctest_remove_unused_imports() {
+ check_doc_test(
+ "remove_unused_imports",
+ r#####"
+struct X();
+mod foo {
+ use super::X$0;
+}
+"#####,
+ r#####"
+struct X();
+mod foo {
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_remove_unused_param() {
check_doc_test(
"remove_unused_param",
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index 03d855350..a262570d9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -3,7 +3,7 @@
use std::ops;
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
-use hir::{db::HirDatabase, HirDisplay, InFile, Semantics};
+use hir::{db::HirDatabase, HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics};
use ide_db::{
famous_defs::FamousDefs, path_transform::PathTransform,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, SnippetCap,
@@ -84,6 +84,12 @@ pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
})
}
+#[derive(Clone, Copy, PartialEq)]
+pub enum IgnoreAssocItems {
+ DocHiddenAttrPresent,
+ No,
+}
+
#[derive(Copy, Clone, PartialEq)]
pub enum DefaultMethods {
Only,
@@ -94,11 +100,16 @@ pub fn filter_assoc_items(
sema: &Semantics<'_, RootDatabase>,
items: &[hir::AssocItem],
default_methods: DefaultMethods,
+ ignore_items: IgnoreAssocItems,
) -> Vec<InFile<ast::AssocItem>> {
return items
.iter()
- // Note: This throws away items with no source.
.copied()
+ .filter(|assoc_item| {
+ !(ignore_items == IgnoreAssocItems::DocHiddenAttrPresent
+ && assoc_item.attrs(sema.db).has_doc_hidden())
+ })
+ // Note: This throws away items with no source.
.filter_map(|assoc_item| {
let item = match assoc_item {
hir::AssocItem::Function(it) => sema.source(it)?.map(ast::AssocItem::Fn),
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
index 19bfd294b..62bdb6ee6 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs
@@ -42,10 +42,11 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
};
}
-const KNOWN_ARCH: [&str; 19] = [
+const KNOWN_ARCH: [&str; 20] = [
"aarch64",
"arm",
"avr",
+ "csky",
"hexagon",
"mips",
"mips64",
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
index c717a9cb5..e411c1c86 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs
@@ -30,6 +30,8 @@ const SUPPORTED_CALLING_CONVENTIONS: &[&str] = &[
"efiapi",
"avr-interrupt",
"avr-non-blocking-interrupt",
+ "riscv-interrupt-m",
+ "riscv-interrupt-s",
"C-cmse-nonsecure-call",
"wasm",
"system",
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
index d3e75c6da..1e0989405 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -42,7 +42,7 @@ pub(crate) fn complete_mod(
}
let module_definition_file =
- current_module.definition_source(ctx.db).file_id.original_file(ctx.db);
+ current_module.definition_source_file_id(ctx.db).original_file(ctx.db);
let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file));
let directory_to_look_for_submodules = directory_to_look_for_submodules(
current_module,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index 7b145f3c1..3cb65b272 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -463,7 +463,9 @@ impl CompletionContext<'_> {
/// Checks whether this item should be listed in regards to stability. Returns `true` if we should.
pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool {
- let Some(attrs) = attrs else { return true; };
+ let Some(attrs) = attrs else {
+ return true;
+ };
!attrs.is_unstable() || self.is_nightly
}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index cc5221cfc..3ea506590 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -243,10 +243,7 @@ fn analyze(
let Some(name_like) = find_node_at_offset(&speculative_file, offset) else {
let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
- CompletionAnalysis::String {
- original,
- expanded: ast::String::cast(self_token.clone()),
- }
+ CompletionAnalysis::String { original, expanded: ast::String::cast(self_token.clone()) }
} else {
// Fix up trailing whitespace problem
// #[attr(foo = $0
@@ -736,7 +733,7 @@ fn classify_name_ref(
return None;
}
let parent = match ast::Fn::cast(parent.parent()?) {
- Some(x) => x.param_list(),
+ Some(it) => it.param_list(),
None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
};
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
index e850f7bfd..0309952c2 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs
@@ -427,9 +427,26 @@ impl Builder {
let insert_text = self.insert_text.unwrap_or_else(|| label.to_string());
if !self.doc_aliases.is_empty() {
- let doc_aliases = self.doc_aliases.into_iter().join(", ");
+ let doc_aliases = self.doc_aliases.iter().join(", ");
label = SmolStr::from(format!("{label} (alias {doc_aliases})"));
- lookup = SmolStr::from(format!("{lookup} {doc_aliases}"));
+ let lookup_doc_aliases = self
+ .doc_aliases
+ .iter()
+ // Don't include aliases in `lookup` that aren't valid identifiers as including
+ // them results in weird completion filtering behavior e.g. `Partial>` matching
+ // `PartialOrd` because it has an alias of ">".
+ .filter(|alias| {
+ let mut chars = alias.chars();
+ chars.next().is_some_and(char::is_alphabetic)
+ && chars.all(|c| c.is_alphanumeric() || c == '_')
+ })
+ // Deliberately concatenated without separators as adding separators e.g.
+ // `alias1, alias2` results in LSP clients continuing to display the completion even
+ // after typing a comma or space.
+ .join("");
+ if !lookup_doc_aliases.is_empty() {
+ lookup = SmolStr::from(format!("{lookup}{lookup_doc_aliases}"));
+ }
}
if let [import_edit] = &*self.imports_to_add {
// snippets can have multiple imports, but normal completions only have up to one
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index 106d4e1e5..2eaa42040 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -231,7 +231,7 @@ pub fn resolve_completion_edits(
&sema,
current_crate,
NameToImport::exact_case_sensitive(imported_name),
- items_locator::AssocItemSearch::Include,
+ items_locator::AssocSearchMode::Include,
Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
);
let import = items_with_name
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
index c97144b61..1aaf39587 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs
@@ -300,6 +300,7 @@ struct Foo;
at deprecated
at derive macro derive
at derive(…)
+ at derive_const macro derive_const
at doc = "…"
at doc(alias = "…")
at doc(hidden)
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
index 382472083..e80a28904 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -1280,3 +1280,26 @@ fn here_we_go() {
"#]],
);
}
+
+#[test]
+fn completion_filtering_excludes_non_identifier_doc_aliases() {
+ check_edit(
+ "PartialOrdcmporder",
+ r#"
+#[doc(alias = ">")]
+#[doc(alias = "cmp")]
+#[doc(alias = "order")]
+trait PartialOrd {}
+
+struct Foo<T: Partial$0
+"#,
+ r#"
+#[doc(alias = ">")]
+#[doc(alias = "cmp")]
+#[doc(alias = "order")]
+trait PartialOrd {}
+
+struct Foo<T: PartialOrd
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index 4e75dc4db..faec74206 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -21,7 +21,7 @@ once_cell = "1.17.0"
either = "1.7.0"
itertools = "0.10.5"
arrayvec = "0.7.2"
-indexmap = "1.9.1"
+indexmap = "2.0.0"
memchr = "2.5.0"
triomphe.workspace = true
nohash-hasher.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index 0dd544d0a..a0b05c87a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -99,8 +99,8 @@ impl RootDatabase {
hir::db::AstIdMapQuery
hir::db::ParseMacroExpansionQuery
hir::db::InternMacroCallQuery
- hir::db::MacroArgTextQuery
- hir::db::MacroDefQuery
+ hir::db::MacroArgNodeQuery
+ hir::db::DeclMacroExpanderQuery
hir::db::MacroExpandQuery
hir::db::ExpandProcMacroQuery
hir::db::HygieneFrameQuery
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index 760834bfa..5e4562d9c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -7,10 +7,10 @@
use arrayvec::ArrayVec;
use hir::{
- Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field,
- Function, GenericParam, HasVisibility, Impl, Label, Local, Macro, Module, ModuleDef, Name,
- PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, TypeAlias, Variant,
- Visibility,
+ Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper,
+ ExternCrateDecl, Field, Function, GenericParam, HasVisibility, Impl, Label, Local, Macro,
+ Module, ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias,
+ TypeAlias, Variant, Visibility,
};
use stdx::impl_from;
use syntax::{
@@ -42,6 +42,7 @@ pub enum Definition {
DeriveHelper(DeriveHelper),
BuiltinAttr(BuiltinAttr),
ToolModule(ToolModule),
+ ExternCrateDecl(ExternCrateDecl),
}
impl Definition {
@@ -73,6 +74,7 @@ impl Definition {
Definition::Local(it) => it.module(db),
Definition::GenericParam(it) => it.module(db),
Definition::Label(it) => it.module(db),
+ Definition::ExternCrateDecl(it) => it.module(db),
Definition::DeriveHelper(it) => it.derive().module(db),
Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => {
return None
@@ -93,6 +95,7 @@ impl Definition {
Definition::TraitAlias(it) => it.visibility(db),
Definition::TypeAlias(it) => it.visibility(db),
Definition::Variant(it) => it.visibility(db),
+ Definition::ExternCrateDecl(it) => it.visibility(db),
Definition::BuiltinType(_) => Visibility::Public,
Definition::Macro(_) => return None,
Definition::BuiltinAttr(_)
@@ -127,6 +130,7 @@ impl Definition {
Definition::BuiltinAttr(_) => return None, // FIXME
Definition::ToolModule(_) => return None, // FIXME
Definition::DeriveHelper(it) => it.name(db),
+ Definition::ExternCrateDecl(it) => return it.alias_or_name(db),
};
Some(name)
}
@@ -196,6 +200,10 @@ impl IdentClass {
res.push(Definition::Local(local_ref));
res.push(Definition::Field(field_ref));
}
+ IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand { decl, krate }) => {
+ res.push(Definition::ExternCrateDecl(decl));
+ res.push(Definition::Module(krate.root_module()));
+ }
IdentClass::Operator(
OperatorClass::Await(func)
| OperatorClass::Prefix(func)
@@ -222,6 +230,10 @@ impl IdentClass {
res.push(Definition::Local(local_ref));
res.push(Definition::Field(field_ref));
}
+ IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand { decl, krate }) => {
+ res.push(Definition::ExternCrateDecl(decl));
+ res.push(Definition::Module(krate.root_module()));
+ }
IdentClass::Operator(_) => (),
}
res
@@ -310,6 +322,7 @@ impl NameClass {
ast::Item::Enum(it) => Definition::Adt(hir::Adt::Enum(sema.to_def(&it)?)),
ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)),
ast::Item::Union(it) => Definition::Adt(hir::Adt::Union(sema.to_def(&it)?)),
+ ast::Item::ExternCrate(it) => Definition::ExternCrateDecl(sema.to_def(&it)?),
_ => return None,
};
Some(definition)
@@ -346,10 +359,8 @@ impl NameClass {
let path = use_tree.path()?;
sema.resolve_path(&path).map(Definition::from)
} else {
- let extern_crate = rename.syntax().parent().and_then(ast::ExternCrate::cast)?;
- let krate = sema.resolve_extern_crate(&extern_crate)?;
- let root_module = krate.root_module(sema.db);
- Some(Definition::Module(root_module))
+ sema.to_def(&rename.syntax().parent().and_then(ast::ExternCrate::cast)?)
+ .map(Definition::ExternCrateDecl)
}
}
}
@@ -427,7 +438,19 @@ impl OperatorClass {
#[derive(Debug)]
pub enum NameRefClass {
Definition(Definition),
- FieldShorthand { local_ref: Local, field_ref: Field },
+ FieldShorthand {
+ local_ref: Local,
+ field_ref: Field,
+ },
+    /// The specific situation where we have an extern crate decl without a rename.
+ /// Here we have both a declaration and a reference.
+ /// ```rs
+ /// extern crate foo;
+ /// ```
+ ExternCrateShorthand {
+ decl: ExternCrateDecl,
+ krate: Crate,
+ },
}
impl NameRefClass {
@@ -513,10 +536,14 @@ impl NameRefClass {
}
None
},
- ast::ExternCrate(extern_crate) => {
- let krate = sema.resolve_extern_crate(&extern_crate)?;
- let root_module = krate.root_module(sema.db);
- Some(NameRefClass::Definition(Definition::Module(root_module)))
+ ast::ExternCrate(extern_crate_ast) => {
+ let extern_crate = sema.to_def(&extern_crate_ast)?;
+ let krate = extern_crate.resolved_crate(sema.db)?;
+ Some(if extern_crate_ast.rename().is_some() {
+ NameRefClass::Definition(Definition::Module(krate.root_module()))
+ } else {
+ NameRefClass::ExternCrateShorthand { krate, decl: extern_crate }
+ })
},
_ => None
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
index c8341fed1..b63dde2c2 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
@@ -167,7 +167,7 @@ impl FamousDefs<'_, '_> {
lang_crate => lang_crate,
};
let std_crate = self.find_lang_crate(lang_crate)?;
- let mut module = std_crate.root_module(db);
+ let mut module = std_crate.root_module();
for segment in path {
module = module.children(db).find_map(|child| {
let name = child.name(db)?;
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
index e488300b4..49b37024a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
@@ -931,22 +931,6 @@ $ cat $(find -name '*.s')
"##,
},
Lint {
- label: "abi_thiscall",
- description: r##"# `abi_thiscall`
-
-The tracking issue for this feature is: [#42202]
-
-[#42202]: https://github.com/rust-lang/rust/issues/42202
-
-------------------------
-
-The MSVC ABI on x86 Windows uses the `thiscall` calling convention for C++
-instance methods by default; it is identical to the usual (C) calling
-convention on x86 Windows except that the first parameter of the method,
-the `this` pointer, is passed in the ECX register.
-"##,
- },
- Lint {
label: "allocator_api",
description: r##"# `allocator_api`
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
index eba9d8afc..1eb8f0002 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -9,7 +9,10 @@ use syntax::{
AstToken, SyntaxKind, SyntaxToken, TokenAtOffset,
};
-use crate::{defs::Definition, generated, RootDatabase};
+use crate::{
+ defs::{Definition, IdentClass},
+ generated, RootDatabase,
+};
pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option<Name> {
match item {
@@ -109,3 +112,16 @@ pub fn is_editable_crate(krate: Crate, db: &RootDatabase) -> bool {
let source_root_id = db.file_source_root(root_file);
!db.source_root(source_root_id).is_library
}
+
+pub fn get_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<Definition> {
+ for token in sema.descend_into_macros(token) {
+ let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
+ if let Some(&[x]) = def.as_deref() {
+ return Some(x);
+ }
+ }
+ None
+}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
index 901d592c6..e52dc3567 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs
@@ -13,7 +13,7 @@ use syntax::{
use crate::{
helpers::item_name,
- items_locator::{self, AssocItemSearch, DEFAULT_QUERY_SEARCH_LIMIT},
+ items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT},
RootDatabase,
};
@@ -317,7 +317,7 @@ fn path_applicable_imports(
// * improve the associated completion item matching and/or scoring to ensure no noisy completions appear
//
// see also an ignored test under FIXME comment in the qualify_path.rs module
- AssocItemSearch::Exclude,
+ AssocSearchMode::Exclude,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
.filter_map(|item| {
@@ -334,7 +334,7 @@ fn path_applicable_imports(
sema,
current_crate,
path_candidate.name.clone(),
- AssocItemSearch::Include,
+ AssocSearchMode::Include,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
.filter_map(|item| {
@@ -483,7 +483,7 @@ fn trait_applicable_items(
sema,
current_crate,
trait_candidate.assoc_item_name.clone(),
- AssocItemSearch::AssocItemsOnly,
+ AssocSearchMode::AssocItemsOnly,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
.filter_map(|input| item_as_assoc(db, input))
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
index 46f1353e2..3f7a3ec2d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs
@@ -3,10 +3,7 @@
//! The main reason for this module to exist is the fact that project's items and dependencies' items
//! are located in different caches, with different APIs.
use either::Either;
-use hir::{
- import_map::{self, ImportKind},
- AsAssocItem, Crate, ItemInNs, Semantics,
-};
+use hir::{import_map, AsAssocItem, Crate, ItemInNs, Semantics};
use limit::Limit;
use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
@@ -14,23 +11,14 @@ use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase};
/// A value to use, when uncertain which limit to pick.
pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40);
-/// Three possible ways to search for the name in associated and/or other items.
-#[derive(Debug, Clone, Copy)]
-pub enum AssocItemSearch {
- /// Search for the name in both associated and other items.
- Include,
- /// Search for the name in other items only.
- Exclude,
- /// Search for the name in the associated items only.
- AssocItemsOnly,
-}
+pub use import_map::AssocSearchMode;
/// Searches for importable items with the given name in the crate and its dependencies.
pub fn items_with_name<'a>(
sema: &'a Semantics<'_, RootDatabase>,
krate: Crate,
name: NameToImport,
- assoc_item_search: AssocItemSearch,
+ assoc_item_search: AssocSearchMode,
limit: Option<usize>,
) -> impl Iterator<Item = ItemInNs> + 'a {
let _p = profile::span("items_with_name").detail(|| {
@@ -48,9 +36,7 @@ pub fn items_with_name<'a>(
let mut local_query = symbol_index::Query::new(exact_name.clone());
local_query.exact();
- let external_query = import_map::Query::new(exact_name)
- .name_only()
- .search_mode(import_map::SearchMode::Equals);
+ let external_query = import_map::Query::new(exact_name);
(
local_query,
@@ -61,17 +47,8 @@ pub fn items_with_name<'a>(
let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
- .search_mode(import_map::SearchMode::Fuzzy)
- .name_only();
- match assoc_item_search {
- AssocItemSearch::Include => {}
- AssocItemSearch::Exclude => {
- external_query = external_query.exclude_import_kind(ImportKind::AssociatedItem);
- }
- AssocItemSearch::AssocItemsOnly => {
- external_query = external_query.assoc_items_only();
- }
- }
+ .fuzzy()
+ .assoc_search_mode(assoc_item_search);
if fuzzy_search_string.to_lowercase() != fuzzy_search_string {
local_query.case_sensitive();
@@ -93,13 +70,15 @@ pub fn items_with_name<'a>(
fn find_items<'a>(
sema: &'a Semantics<'_, RootDatabase>,
krate: Crate,
- assoc_item_search: AssocItemSearch,
+ assoc_item_search: AssocSearchMode,
local_query: symbol_index::Query,
external_query: import_map::Query,
) -> impl Iterator<Item = ItemInNs> + 'a {
let _p = profile::span("find_items");
let db = sema.db;
+ // NOTE: `external_query` includes `assoc_item_search`, so we don't need to
+ // filter on our own.
let external_importables =
krate.query_external_importables(db, external_query).map(|external_importable| {
match external_importable {
@@ -112,18 +91,15 @@ fn find_items<'a>(
let local_results = local_query
.search(&symbol_index::crate_symbols(db, krate))
.into_iter()
- .filter_map(|local_candidate| match local_candidate.def {
- hir::ModuleDef::Macro(macro_def) => Some(ItemInNs::Macros(macro_def)),
- def => Some(ItemInNs::from(def)),
+ .filter(move |candidate| match assoc_item_search {
+ AssocSearchMode::Include => true,
+ AssocSearchMode::Exclude => candidate.def.as_assoc_item(db).is_none(),
+ AssocSearchMode::AssocItemsOnly => candidate.def.as_assoc_item(db).is_some(),
+ })
+ .map(|local_candidate| match local_candidate.def {
+ hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
+ def => ItemInNs::from(def),
});
- external_importables.chain(local_results).filter(move |&item| match assoc_item_search {
- AssocItemSearch::Include => true,
- AssocItemSearch::Exclude => !is_assoc_item(item, sema.db),
- AssocItemSearch::AssocItemsOnly => is_assoc_item(item, sema.db),
- })
-}
-
-fn is_assoc_item(item: ItemInNs, db: &RootDatabase) -> bool {
- item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db)).is_some()
+ external_importables.chain(local_results)
}
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index ff1a20f03..f27ed485d 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -200,8 +200,8 @@ impl RootDatabase {
hir_db::AstIdMapQuery
// hir_db::ParseMacroExpansionQuery
// hir_db::InternMacroCallQuery
- hir_db::MacroArgTextQuery
- hir_db::MacroDefQuery
+ hir_db::MacroArgNodeQuery
+ hir_db::DeclMacroExpanderQuery
// hir_db::MacroExpandQuery
hir_db::ExpandProcMacroQuery
hir_db::HygieneFrameQuery
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
index 73e6a920e..1d0cb426a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -195,7 +195,7 @@ fn postorder(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> {
})
}
-impl<'a> Ctx<'a> {
+impl Ctx<'_> {
fn apply(&self, item: &SyntaxNode) {
// `transform_path` may update a node's parent and that would break the
// tree traversal. Thus all paths in the tree are collected into a vec
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 52a23b4b8..aa0bb7cce 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -82,8 +82,9 @@ impl Definition {
}
/// Textual range of the identifier which will change when renaming this
- /// `Definition`. Note that some definitions, like builtin types, can't be
- /// renamed.
+ /// `Definition`. Note that builtin types can't be
+    /// renamed, and extern crate names will report their range, though a rename will introduce
+ /// an alias instead.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let res = match self {
Definition::Macro(mac) => {
@@ -146,6 +147,16 @@ impl Definition {
let lifetime = src.value.lifetime()?;
src.with_value(lifetime.syntax()).original_file_range_opt(sema.db)
}
+ Definition::ExternCrateDecl(it) => {
+ let src = it.source(sema.db)?;
+ if let Some(rename) = src.value.rename() {
+ let name = rename.name()?;
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ } else {
+ let name = src.value.name_ref()?;
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ }
+ }
Definition::BuiltinType(_) => return None,
Definition::SelfType(_) => return None,
Definition::BuiltinAttr(_) => return None,
@@ -526,6 +537,9 @@ fn source_edit_from_def(
TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
),
+ Definition::ExternCrateDecl(decl) if decl.alias(sema.db).is_none() => {
+ (TextRange::empty(range.end()), format!(" as {new_name}"))
+ }
_ => (range, new_name.to_owned()),
};
edit.replace(range, new_name);
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index e8ff107bd..d5abd0991 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -127,7 +127,7 @@ impl SearchScope {
}
/// Build a search scope spanning the given module and all its submodules.
- fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
+ pub fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
let mut entries = IntMap::default();
let (file_id, range) = {
@@ -149,10 +149,8 @@ impl SearchScope {
let mut to_visit: Vec<_> = module.children(db).collect();
while let Some(module) = to_visit.pop() {
- if let InFile { file_id, value: ModuleSource::SourceFile(_) } =
- module.definition_source(db)
- {
- entries.insert(file_id.original_file(db), None);
+ if let Some(file_id) = module.as_source_file_id(db) {
+ entries.insert(file_id, None);
}
to_visit.extend(module.children(db));
}
@@ -331,7 +329,7 @@ impl Definition {
pub struct FindUsages<'a> {
def: Definition,
sema: &'a Semantics<'a, RootDatabase>,
- scope: Option<SearchScope>,
+ scope: Option<&'a SearchScope>,
/// The container of our definition should it be an assoc item
assoc_item_container: Option<hir::AssocItemContainer>,
/// whether to search for the `Self` type of the definition
@@ -342,19 +340,19 @@ pub struct FindUsages<'a> {
impl<'a> FindUsages<'a> {
/// Enable searching for `Self` when the definition is a type or `self` for modules.
- pub fn include_self_refs(mut self) -> FindUsages<'a> {
+ pub fn include_self_refs(mut self) -> Self {
self.include_self_kw_refs = def_to_ty(self.sema, &self.def);
self.search_self_mod = true;
self
}
/// Limit the search to a given [`SearchScope`].
- pub fn in_scope(self, scope: SearchScope) -> FindUsages<'a> {
+ pub fn in_scope(self, scope: &'a SearchScope) -> Self {
self.set_scope(Some(scope))
}
/// Limit the search to a given [`SearchScope`].
- pub fn set_scope(mut self, scope: Option<SearchScope>) -> FindUsages<'a> {
+ pub fn set_scope(mut self, scope: Option<&'a SearchScope>) -> Self {
assert!(self.scope.is_none());
self.scope = scope;
self
@@ -378,7 +376,7 @@ impl<'a> FindUsages<'a> {
res
}
- fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
+ pub fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
let _p = profile::span("FindUsages:search");
let sema = self.sema;
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
index 061fb0f05..39763479c 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
@@ -7,14 +7,17 @@ use std::{collections::hash_map::Entry, iter, mem};
use crate::SnippetCap;
use base_db::{AnchoredPathBuf, FileId};
+use itertools::Itertools;
use nohash_hasher::IntMap;
use stdx::never;
-use syntax::{algo, ast, ted, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize};
+use syntax::{
+ algo, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
+};
use text_edit::{TextEdit, TextEditBuilder};
#[derive(Default, Debug, Clone)]
pub struct SourceChange {
- pub source_file_edits: IntMap<FileId, TextEdit>,
+ pub source_file_edits: IntMap<FileId, (TextEdit, Option<SnippetEdit>)>,
pub file_system_edits: Vec<FileSystemEdit>,
pub is_snippet: bool,
}
@@ -23,7 +26,7 @@ impl SourceChange {
/// Creates a new SourceChange with the given label
/// from the edits.
pub fn from_edits(
- source_file_edits: IntMap<FileId, TextEdit>,
+ source_file_edits: IntMap<FileId, (TextEdit, Option<SnippetEdit>)>,
file_system_edits: Vec<FileSystemEdit>,
) -> Self {
SourceChange { source_file_edits, file_system_edits, is_snippet: false }
@@ -31,7 +34,7 @@ impl SourceChange {
pub fn from_text_edit(file_id: FileId, edit: TextEdit) -> Self {
SourceChange {
- source_file_edits: iter::once((file_id, edit)).collect(),
+ source_file_edits: iter::once((file_id, (edit, None))).collect(),
..Default::default()
}
}
@@ -39,12 +42,31 @@ impl SourceChange {
/// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
/// edits for a file if some already exist.
pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
+ self.insert_source_and_snippet_edit(file_id, edit, None)
+ }
+
+ /// Inserts a [`TextEdit`] and potentially a [`SnippetEdit`] for the given [`FileId`].
+ /// This properly handles merging existing edits for a file if some already exist.
+ pub fn insert_source_and_snippet_edit(
+ &mut self,
+ file_id: FileId,
+ edit: TextEdit,
+ snippet_edit: Option<SnippetEdit>,
+ ) {
match self.source_file_edits.entry(file_id) {
Entry::Occupied(mut entry) => {
- never!(entry.get_mut().union(edit).is_err(), "overlapping edits for same file");
+ let value = entry.get_mut();
+ never!(value.0.union(edit).is_err(), "overlapping edits for same file");
+ never!(
+ value.1.is_some() && snippet_edit.is_some(),
+ "overlapping snippet edits for same file"
+ );
+ if value.1.is_none() {
+ value.1 = snippet_edit;
+ }
}
Entry::Vacant(entry) => {
- entry.insert(edit);
+ entry.insert((edit, snippet_edit));
}
}
}
@@ -53,7 +75,10 @@ impl SourceChange {
self.file_system_edits.push(edit);
}
- pub fn get_source_edit(&self, file_id: FileId) -> Option<&TextEdit> {
+ pub fn get_source_and_snippet_edit(
+ &self,
+ file_id: FileId,
+ ) -> Option<&(TextEdit, Option<SnippetEdit>)> {
self.source_file_edits.get(&file_id)
}
@@ -67,7 +92,18 @@ impl SourceChange {
impl Extend<(FileId, TextEdit)> for SourceChange {
fn extend<T: IntoIterator<Item = (FileId, TextEdit)>>(&mut self, iter: T) {
- iter.into_iter().for_each(|(file_id, edit)| self.insert_source_edit(file_id, edit));
+ self.extend(iter.into_iter().map(|(file_id, edit)| (file_id, (edit, None))))
+ }
+}
+
+impl Extend<(FileId, (TextEdit, Option<SnippetEdit>))> for SourceChange {
+ fn extend<T: IntoIterator<Item = (FileId, (TextEdit, Option<SnippetEdit>))>>(
+ &mut self,
+ iter: T,
+ ) {
+ iter.into_iter().for_each(|(file_id, (edit, snippet_edit))| {
+ self.insert_source_and_snippet_edit(file_id, edit, snippet_edit)
+ });
}
}
@@ -79,6 +115,8 @@ impl Extend<FileSystemEdit> for SourceChange {
impl From<IntMap<FileId, TextEdit>> for SourceChange {
fn from(source_file_edits: IntMap<FileId, TextEdit>) -> SourceChange {
+ let source_file_edits =
+ source_file_edits.into_iter().map(|(file_id, edit)| (file_id, (edit, None))).collect();
SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
}
}
@@ -91,6 +129,65 @@ impl FromIterator<(FileId, TextEdit)> for SourceChange {
}
}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct SnippetEdit(Vec<(u32, TextRange)>);
+
+impl SnippetEdit {
+ pub fn new(snippets: Vec<Snippet>) -> Self {
+ let mut snippet_ranges = snippets
+ .into_iter()
+ .zip(1..)
+ .with_position()
+ .map(|pos| {
+ let (snippet, index) = match pos {
+ itertools::Position::First(it) | itertools::Position::Middle(it) => it,
+ // last/only snippet gets index 0
+ itertools::Position::Last((snippet, _))
+ | itertools::Position::Only((snippet, _)) => (snippet, 0),
+ };
+
+ let range = match snippet {
+ Snippet::Tabstop(pos) => TextRange::empty(pos),
+ Snippet::Placeholder(range) => range,
+ };
+ (index, range)
+ })
+ .collect_vec();
+
+ snippet_ranges.sort_by_key(|(_, range)| range.start());
+
+ // Ensure that none of the ranges overlap
+ let disjoint_ranges = snippet_ranges
+ .iter()
+ .zip(snippet_ranges.iter().skip(1))
+ .all(|((_, left), (_, right))| left.end() <= right.start() || left == right);
+ stdx::always!(disjoint_ranges);
+
+ SnippetEdit(snippet_ranges)
+ }
+
+ /// Inserts all of the snippets into the given text.
+ pub fn apply(&self, text: &mut String) {
+ // Start from the back so that we don't have to adjust ranges
+ for (index, range) in self.0.iter().rev() {
+ if range.is_empty() {
+ // is a tabstop
+ text.insert_str(range.start().into(), &format!("${index}"));
+ } else {
+ // is a placeholder
+ text.insert(range.end().into(), '}');
+ text.insert_str(range.start().into(), &format!("${{{index}:"));
+ }
+ }
+ }
+
+ /// Gets the underlying snippet index + text range
+ /// Tabstops are represented by an empty range, and placeholders use the range that they were given
+ pub fn into_edit_ranges(self) -> Vec<(u32, TextRange)> {
+ self.0
+ }
+}
+
pub struct SourceChangeBuilder {
pub edit: TextEditBuilder,
pub file_id: FileId,
@@ -149,24 +246,19 @@ impl SourceChangeBuilder {
}
fn commit(&mut self) {
- // Render snippets first so that they get bundled into the tree diff
- if let Some(mut snippets) = self.snippet_builder.take() {
- // Last snippet always has stop index 0
- let last_stop = snippets.places.pop().unwrap();
- last_stop.place(0);
-
- for (index, stop) in snippets.places.into_iter().enumerate() {
- stop.place(index + 1)
- }
- }
+ let snippet_edit = self.snippet_builder.take().map(|builder| {
+ SnippetEdit::new(
+ builder.places.into_iter().map(PlaceSnippet::finalize_position).collect_vec(),
+ )
+ });
if let Some(tm) = self.mutated_tree.take() {
- algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
+ algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit);
}
let edit = mem::take(&mut self.edit).finish();
- if !edit.is_empty() {
- self.source_change.insert_source_edit(self.file_id, edit);
+ if !edit.is_empty() || snippet_edit.is_some() {
+ self.source_change.insert_source_and_snippet_edit(self.file_id, edit, snippet_edit);
}
}
@@ -237,19 +329,31 @@ impl SourceChangeBuilder {
/// Adds a tabstop snippet to place the cursor before `node`
pub fn add_tabstop_before(&mut self, _cap: SnippetCap, node: impl AstNode) {
assert!(node.syntax().parent().is_some());
- self.add_snippet(PlaceSnippet::Before(node.syntax().clone()));
+ self.add_snippet(PlaceSnippet::Before(node.syntax().clone().into()));
}
/// Adds a tabstop snippet to place the cursor after `node`
pub fn add_tabstop_after(&mut self, _cap: SnippetCap, node: impl AstNode) {
assert!(node.syntax().parent().is_some());
- self.add_snippet(PlaceSnippet::After(node.syntax().clone()));
+ self.add_snippet(PlaceSnippet::After(node.syntax().clone().into()));
+ }
+
+ /// Adds a tabstop snippet to place the cursor before `token`
+ pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
+ assert!(token.parent().is_some());
+ self.add_snippet(PlaceSnippet::Before(token.clone().into()));
+ }
+
+ /// Adds a tabstop snippet to place the cursor after `token`
+ pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
+ assert!(token.parent().is_some());
+ self.add_snippet(PlaceSnippet::After(token.clone().into()));
}
/// Adds a snippet to move the cursor selected over `node`
pub fn add_placeholder_snippet(&mut self, _cap: SnippetCap, node: impl AstNode) {
assert!(node.syntax().parent().is_some());
- self.add_snippet(PlaceSnippet::Over(node.syntax().clone()))
+ self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into()))
}
fn add_snippet(&mut self, snippet: PlaceSnippet) {
@@ -260,6 +364,16 @@ impl SourceChangeBuilder {
pub fn finish(mut self) -> SourceChange {
self.commit();
+
+ // Only one file can have snippet edits
+ stdx::never!(self
+ .source_change
+ .source_file_edits
+ .iter()
+ .filter(|(_, (_, snippet_edit))| snippet_edit.is_some())
+ .at_most_one()
+ .is_err());
+
mem::take(&mut self.source_change)
}
}
@@ -281,65 +395,28 @@ impl From<FileSystemEdit> for SourceChange {
}
}
+pub enum Snippet {
+ /// A tabstop snippet (e.g. `$0`).
+ Tabstop(TextSize),
+ /// A placeholder snippet (e.g. `${0:placeholder}`).
+ Placeholder(TextRange),
+}
+
enum PlaceSnippet {
- /// Place a tabstop before a node
- Before(SyntaxNode),
- /// Place a tabstop before a node
- After(SyntaxNode),
- /// Place a placeholder snippet in place of the node
- Over(SyntaxNode),
+ /// Place a tabstop before an element
+ Before(SyntaxElement),
+    /// Place a tabstop after an element
+ After(SyntaxElement),
+ /// Place a placeholder snippet in place of the element
+ Over(SyntaxElement),
}
impl PlaceSnippet {
- /// Places the snippet before or over a node with the given tab stop index
- fn place(self, order: usize) {
- // ensure the target node is still attached
- match &self {
- PlaceSnippet::Before(node) | PlaceSnippet::After(node) | PlaceSnippet::Over(node) => {
- // node should still be in the tree, but if it isn't
- // then it's okay to just ignore this place
- if stdx::never!(node.parent().is_none()) {
- return;
- }
- }
- }
-
+ fn finalize_position(self) -> Snippet {
match self {
- PlaceSnippet::Before(node) => {
- ted::insert_raw(ted::Position::before(&node), Self::make_tab_stop(order));
- }
- PlaceSnippet::After(node) => {
- ted::insert_raw(ted::Position::after(&node), Self::make_tab_stop(order));
- }
- PlaceSnippet::Over(node) => {
- let position = ted::Position::before(&node);
- node.detach();
-
- let snippet = ast::SourceFile::parse(&format!("${{{order}:_}}"))
- .syntax_node()
- .clone_for_update();
-
- let placeholder =
- snippet.descendants().find_map(ast::UnderscoreExpr::cast).unwrap();
- ted::replace(placeholder.syntax(), node);
-
- ted::insert_raw(position, snippet);
- }
+ PlaceSnippet::Before(it) => Snippet::Tabstop(it.text_range().start()),
+ PlaceSnippet::After(it) => Snippet::Tabstop(it.text_range().end()),
+ PlaceSnippet::Over(it) => Snippet::Placeholder(it.text_range()),
}
}
-
- fn make_tab_stop(order: usize) -> SyntaxNode {
- let stop = ast::SourceFile::parse(&format!("stop!(${order})"))
- .syntax_node()
- .descendants()
- .find_map(ast::TokenTree::cast)
- .unwrap()
- .syntax()
- .clone_for_update();
-
- stop.first_token().unwrap().detach();
- stop.last_token().unwrap().detach();
-
- stop
- }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
index e18624fcc..14aa39401 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -16,6 +16,7 @@ cov-mark = "2.0.0-pre.1"
either = "1.7.0"
itertools = "0.10.5"
serde_json = "1.0.86"
+once_cell = "1.17.0"
# local deps
profile.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
index 30576c71f..491005403 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: break-outside-of-loop
//
@@ -13,10 +13,11 @@ pub(crate) fn break_outside_of_loop(
let construct = if d.is_break { "break" } else { "continue" };
format!("{construct} outside of loop")
};
- Diagnostic::new(
- "break-outside-of-loop",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0268"),
message,
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs
index d2f27664d..e1e5db91c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs
@@ -1,6 +1,6 @@
use hir::HirDisplay;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: expected-function
//
@@ -9,10 +9,11 @@ pub(crate) fn expected_function(
ctx: &DiagnosticsContext<'_>,
d: &hir::ExpectedFunction,
) -> Diagnostic {
- Diagnostic::new(
- "expected-function",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0618"),
format!("expected function, found {}", d.found.display(ctx.sema.db)),
- ctx.sema.diagnostics_display_range(d.call.clone().map(|it| it.into())).range,
+ d.call.clone().map(|it| it.into()),
)
.experimental()
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
index 2b7105362..3b69640af 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs
@@ -5,7 +5,7 @@ use ide_db::{base_db::FileId, source_change::SourceChange};
use syntax::{ast, match_ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, Severity};
+use crate::{fix, Diagnostic, DiagnosticCode};
pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node: &SyntaxNode) {
match_ast! {
@@ -46,14 +46,17 @@ fn check_expr_field_shorthand(
let field_range = record_field.syntax().text_range();
acc.push(
- Diagnostic::new("use-field-shorthand", "Shorthand struct initialization", field_range)
- .severity(Severity::WeakWarning)
- .with_fixes(Some(vec![fix(
- "use_expr_field_shorthand",
- "Use struct shorthand initialization",
- SourceChange::from_text_edit(file_id, edit),
- field_range,
- )])),
+ Diagnostic::new(
+ DiagnosticCode::Clippy("redundant_field_names"),
+ "Shorthand struct initialization",
+ field_range,
+ )
+ .with_fixes(Some(vec![fix(
+ "use_expr_field_shorthand",
+ "Use struct shorthand initialization",
+ SourceChange::from_text_edit(file_id, edit),
+ field_range,
+ )])),
);
}
}
@@ -87,14 +90,17 @@ fn check_pat_field_shorthand(
let field_range = record_pat_field.syntax().text_range();
acc.push(
- Diagnostic::new("use-field-shorthand", "Shorthand struct pattern", field_range)
- .severity(Severity::WeakWarning)
- .with_fixes(Some(vec![fix(
- "use_pat_field_shorthand",
- "Use struct field shorthand",
- SourceChange::from_text_edit(file_id, edit),
- field_range,
- )])),
+ Diagnostic::new(
+ DiagnosticCode::Clippy("redundant_field_names"),
+ "Shorthand struct pattern",
+ field_range,
+ )
+ .with_fixes(Some(vec![fix(
+ "use_pat_field_shorthand",
+ "Use struct field shorthand",
+ SourceChange::from_text_edit(file_id, edit),
+ field_range,
+ )])),
);
}
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
index f558b7256..9eb763d3e 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -1,7 +1,7 @@
use cfg::DnfExpr;
use stdx::format_to;
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: inactive-code
//
@@ -27,13 +27,12 @@ pub(crate) fn inactive_code(
format_to!(message, ": {}", inactive);
}
}
-
+ // FIXME: This shouldn't be a diagnostic
let res = Diagnostic::new(
- "inactive-code",
+ DiagnosticCode::Ra("inactive-code", Severity::WeakWarning),
message,
ctx.sema.diagnostics_display_range(d.node.clone()).range,
)
- .severity(Severity::WeakWarning)
.with_unused(true);
Some(res)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
index 72af9ebfc..4afb4db03 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
@@ -1,17 +1,17 @@
use hir::InFile;
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: incoherent-impl
//
// This diagnostic is triggered if the target type of an impl is from a foreign crate.
pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic {
- Diagnostic::new(
- "incoherent-impl",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0210"),
format!("cannot define inherent `impl` for foreign type"),
- ctx.sema.diagnostics_display_range(InFile::new(d.file_id, d.impl_.clone().into())).range,
+ InFile::new(d.file_id, d.impl_.clone().into()),
)
- .severity(Severity::Error)
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 90279e145..235062bf5 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -1,4 +1,4 @@
-use hir::{db::ExpandDatabase, InFile};
+use hir::{db::ExpandDatabase, CaseType, InFile};
use ide_db::{assists::Assist, defs::NameClass};
use syntax::AstNode;
@@ -6,23 +6,29 @@ use crate::{
// references::rename::rename_with_semantics,
unresolved_fix,
Diagnostic,
+ DiagnosticCode,
DiagnosticsContext,
- Severity,
};
// Diagnostic: incorrect-ident-case
//
// This diagnostic is triggered if an item name doesn't follow https://doc.rust-lang.org/1.0.0/style/style/naming/README.html[Rust naming convention].
pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Diagnostic {
- Diagnostic::new(
- "incorrect-ident-case",
+ let code = match d.expected_case {
+ CaseType::LowerSnakeCase => DiagnosticCode::RustcLint("non_snake_case"),
+ CaseType::UpperSnakeCase => DiagnosticCode::RustcLint("non_upper_case_globals"),
+ // The name is lying. It also covers variants, traits, ...
+ CaseType::UpperCamelCase => DiagnosticCode::RustcLint("non_camel_case_types"),
+ };
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ code,
format!(
"{} `{}` should have {} name, e.g. `{}`",
d.ident_type, d.ident_text, d.expected_case, d.suggested_text
),
- ctx.sema.diagnostics_display_range(InFile::new(d.file, d.ident.clone().into())).range,
+ InFile::new(d.file, d.ident.clone().into()),
)
- .severity(Severity::WeakWarning)
.with_fixes(fixes(ctx, d))
}
@@ -149,7 +155,7 @@ impl TestStruct {
check_diagnostics(
r#"
fn FOO() {}
-// ^^^ 💡 weak: Function `FOO` should have snake_case name, e.g. `foo`
+// ^^^ 💡 warn: Function `FOO` should have snake_case name, e.g. `foo`
"#,
);
check_fix(r#"fn FOO$0() {}"#, r#"fn foo() {}"#);
@@ -160,7 +166,7 @@ fn FOO() {}
check_diagnostics(
r#"
fn NonSnakeCaseName() {}
-// ^^^^^^^^^^^^^^^^ 💡 weak: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name`
+// ^^^^^^^^^^^^^^^^ 💡 warn: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name`
"#,
);
}
@@ -170,10 +176,10 @@ fn NonSnakeCaseName() {}
check_diagnostics(
r#"
fn foo(SomeParam: u8) {}
- // ^^^^^^^^^ 💡 weak: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
+ // ^^^^^^^^^ 💡 warn: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
- // ^^^^^^^^^^ 💡 weak: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
+ // ^^^^^^^^^^ 💡 warn: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
"#,
);
}
@@ -184,9 +190,9 @@ fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
r#"
fn foo() {
let SOME_VALUE = 10;
- // ^^^^^^^^^^ 💡 weak: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
+ // ^^^^^^^^^^ 💡 warn: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
let AnotherValue = 20;
- // ^^^^^^^^^^^^ 💡 weak: Variable `AnotherValue` should have snake_case name, e.g. `another_value`
+ // ^^^^^^^^^^^^ 💡 warn: Variable `AnotherValue` should have snake_case name, e.g. `another_value`
}
"#,
);
@@ -197,10 +203,10 @@ fn foo() {
check_diagnostics(
r#"
struct non_camel_case_name {}
- // ^^^^^^^^^^^^^^^^^^^ 💡 weak: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
+ // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
struct SCREAMING_CASE {}
- // ^^^^^^^^^^^^^^ 💡 weak: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
+ // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
"#,
);
}
@@ -219,7 +225,7 @@ struct AABB {}
check_diagnostics(
r#"
struct SomeStruct { SomeField: u8 }
- // ^^^^^^^^^ 💡 weak: Field `SomeField` should have snake_case name, e.g. `some_field`
+ // ^^^^^^^^^ 💡 warn: Field `SomeField` should have snake_case name, e.g. `some_field`
"#,
);
}
@@ -229,10 +235,10 @@ struct SomeStruct { SomeField: u8 }
check_diagnostics(
r#"
enum some_enum { Val(u8) }
- // ^^^^^^^^^ 💡 weak: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
+ // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
enum SOME_ENUM {}
- // ^^^^^^^^^ 💡 weak: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
+ // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
"#,
);
}
@@ -251,7 +257,7 @@ enum AABB {}
check_diagnostics(
r#"
enum SomeEnum { SOME_VARIANT(u8) }
- // ^^^^^^^^^^^^ 💡 weak: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
+ // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
"#,
);
}
@@ -261,7 +267,7 @@ enum SomeEnum { SOME_VARIANT(u8) }
check_diagnostics(
r#"
const some_weird_const: u8 = 10;
- // ^^^^^^^^^^^^^^^^ 💡 weak: Constant `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
+ // ^^^^^^^^^^^^^^^^ 💡 warn: Constant `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
"#,
);
}
@@ -271,7 +277,7 @@ const some_weird_const: u8 = 10;
check_diagnostics(
r#"
static some_weird_const: u8 = 10;
- // ^^^^^^^^^^^^^^^^ 💡 weak: Static variable `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
+ // ^^^^^^^^^^^^^^^^ 💡 warn: Static variable `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST`
"#,
);
}
@@ -281,13 +287,13 @@ static some_weird_const: u8 = 10;
check_diagnostics(
r#"
struct someStruct;
- // ^^^^^^^^^^ 💡 weak: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
+ // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
impl someStruct {
fn SomeFunc(&self) {
- // ^^^^^^^^ 💡 weak: Function `SomeFunc` should have snake_case name, e.g. `some_func`
+ // ^^^^^^^^ 💡 warn: Function `SomeFunc` should have snake_case name, e.g. `some_func`
let WHY_VAR_IS_CAPS = 10;
- // ^^^^^^^^^^^^^^^ 💡 weak: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
+ // ^^^^^^^^^^^^^^^ 💡 warn: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
}
}
"#,
@@ -319,7 +325,7 @@ enum Option { Some, None }
fn main() {
match Option::None {
SOME_VAR @ None => (),
- // ^^^^^^^^ 💡 weak: Variable `SOME_VAR` should have snake_case name, e.g. `some_var`
+ // ^^^^^^^^ 💡 warn: Variable `SOME_VAR` should have snake_case name, e.g. `some_var`
Some => (),
}
}
@@ -461,7 +467,7 @@ mod CheckNonstandardStyle {
#[allow(bad_style)]
mod CheckBadStyle {
- fn HiImABadFnName() {}
+ struct fooo;
}
mod F {
@@ -483,4 +489,156 @@ pub static SomeStatic: u8 = 10;
"#,
);
}
+
+ #[test]
+ fn deny_attributes() {
+ check_diagnostics(
+ r#"
+#[deny(non_snake_case)]
+fn NonSnakeCaseName(some_var: u8) -> u8 {
+ //^^^^^^^^^^^^^^^^ 💡 error: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name`
+ // cov_flags generated output from elsewhere in this file
+ extern "C" {
+ #[no_mangle]
+ static lower_case: u8;
+ }
+
+ let OtherVar = some_var + 1;
+ //^^^^^^^^ 💡 error: Variable `OtherVar` should have snake_case name, e.g. `other_var`
+ OtherVar
+}
+
+#[deny(nonstandard_style)]
+mod CheckNonstandardStyle {
+ fn HiImABadFnName() {}
+ //^^^^^^^^^^^^^^ 💡 error: Function `HiImABadFnName` should have snake_case name, e.g. `hi_im_abad_fn_name`
+}
+
+#[deny(warnings)]
+mod CheckBadStyle {
+ struct fooo;
+ //^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo`
+}
+
+mod F {
+ #![deny(non_snake_case)]
+ fn CheckItWorksWithModAttr() {}
+ //^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: Function `CheckItWorksWithModAttr` should have snake_case name, e.g. `check_it_works_with_mod_attr`
+}
+
+#[deny(non_snake_case, non_camel_case_types)]
+pub struct some_type {
+ //^^^^^^^^^ 💡 error: Structure `some_type` should have CamelCase name, e.g. `SomeType`
+ SOME_FIELD: u8,
+ //^^^^^^^^^^ 💡 error: Field `SOME_FIELD` should have snake_case name, e.g. `some_field`
+ SomeField: u16,
+ //^^^^^^^^^ 💡 error: Field `SomeField` should have snake_case name, e.g. `some_field`
+}
+
+#[deny(non_upper_case_globals)]
+pub const some_const: u8 = 10;
+ //^^^^^^^^^^ 💡 error: Constant `some_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_CONST`
+
+#[deny(non_upper_case_globals)]
+pub static SomeStatic: u8 = 10;
+ //^^^^^^^^^^ 💡 error: Static variable `SomeStatic` should have UPPER_SNAKE_CASE name, e.g. `SOME_STATIC`
+ "#,
+ );
+ }
+
+ #[test]
+ fn fn_inner_items() {
+ check_diagnostics(
+ r#"
+fn main() {
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ let INNER_INNER = 42;
+ //^^^^^^^^^^^ 💡 warn: Variable `INNER_INNER` should have snake_case name, e.g. `inner_inner`
+ }
+
+ let INNER_LOCAL = 42;
+ //^^^^^^^^^^^ 💡 warn: Variable `INNER_LOCAL` should have snake_case name, e.g. `inner_local`
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn const_body_inner_items() {
+ check_diagnostics(
+ r#"
+const _: () = {
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+
+ const foo: () = {
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ };
+};
+"#,
+ );
+ }
+
+ #[test]
+ fn static_body_inner_items() {
+ check_diagnostics(
+ r#"
+static FOO: () = {
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+
+ static bar: () = {
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ };
+};
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_variant_body_inner_item() {
+ check_diagnostics(
+ r#"
+enum E {
+ A = {
+ const foo: bool = true;
+ //^^^ 💡 warn: Constant `foo` should have UPPER_SNAKE_CASE name, e.g. `FOO`
+ static bar: bool = true;
+ //^^^ 💡 warn: Static variable `bar` should have UPPER_SNAKE_CASE name, e.g. `BAR`
+ fn BAZ() {}
+ //^^^ 💡 warn: Function `BAZ` should have snake_case name, e.g. `baz`
+ 42
+ },
+}
+"#,
+ );
+ }
}
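
Because incorrect-case is now reported under the standard rustc lint names (non_snake_case, non_upper_case_globals, non_camel_case_types), ordinary lint-control attributes govern it, which is exactly what the new deny_attributes test checks. A small compilable illustration of the user-visible effect, using only standard rustc lints and assuming nothing rust-analyzer specific:

// Allowed here, so neither rustc nor (per the tests above) rust-analyzer reports it.
#[allow(non_snake_case)]
fn QuietFn() {}

// Left at the default lint level: rustc warns via non_camel_case_types, and the
// corresponding diagnostic above is now rendered at "warn" rather than "weak".
struct some_struct;

fn main() {
    QuietFn();
    let _ = some_struct;
}
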
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
index c779266bc..1ec17952b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: invalid-derive-target
//
@@ -11,11 +11,10 @@ pub(crate) fn invalid_derive_target(
let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
Diagnostic::new(
- "invalid-derive-target",
+ DiagnosticCode::RustcHardError("E0774"),
"`derive` may only be applied to `struct`s, `enum`s and `union`s",
display_range,
)
- .severity(Severity::Error)
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index 04ce1e0fe..a337e2660 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -17,7 +17,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, DiagnosticsConfig, Severity};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsConfig, Severity};
#[derive(Default)]
struct State {
@@ -117,11 +117,10 @@ pub(crate) fn json_in_items(
edit.insert(range.start(), state.result);
acc.push(
Diagnostic::new(
- "json-is-not-rust",
+ DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning),
"JSON syntax is not valid as a Rust item",
range,
)
- .severity(Severity::WeakWarning)
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(file_id);
let scope = match import_scope {
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
index 7547779a9..f54cdd63b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: macro-error
//
@@ -6,7 +6,12 @@ use crate::{Diagnostic, DiagnosticsContext};
pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
// Use more accurate position if available.
let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
- Diagnostic::new("macro-error", d.message.clone(), display_range).experimental()
+ Diagnostic::new(
+ DiagnosticCode::Ra("macro-error", Severity::Error),
+ d.message.clone(),
+ display_range,
+ )
+ .experimental()
}
// Diagnostic: macro-error
@@ -16,7 +21,12 @@ pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefErr
// Use more accurate position if available.
let display_range =
ctx.resolve_precise_location(&d.node.clone().map(|it| it.syntax_node_ptr()), d.name);
- Diagnostic::new("macro-def-error", d.message.clone(), display_range).experimental()
+ Diagnostic::new(
+ DiagnosticCode::Ra("macro-def-error", Severity::Error),
+ d.message.clone(),
+ display_range,
+ )
+ .experimental()
}
#[cfg(test)]
@@ -41,6 +51,9 @@ macro_rules! compile_error { () => {} }
compile_error!("compile_error macro works");
//^^^^^^^^^^^^^ error: compile_error macro works
+
+ compile_error! { "compile_error macro braced works" }
+//^^^^^^^^^^^^^ error: compile_error macro braced works
"#,
);
}
@@ -67,7 +80,7 @@ macro_rules! m {
fn f() {
m!();
- //^^^^ error: unresolved macro `$crate::private::concat!`
+ //^^^^ error: unresolved macro $crate::private::concat
}
//- /core.rs crate:core
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
index cd48bdba0..fc57dde69 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: malformed-derive
//
@@ -10,11 +10,10 @@ pub(crate) fn malformed_derive(
let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
Diagnostic::new(
- "malformed-derive",
+ DiagnosticCode::RustcHardError("E0777"),
"malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`",
display_range,
)
- .severity(Severity::Error)
}
#[cfg(test)]
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index c5db8c374..6238c7e09 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -3,7 +3,7 @@ use syntax::{
AstNode, TextRange,
};
-use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext};
+use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: mismatched-arg-count
//
@@ -14,7 +14,7 @@ pub(crate) fn mismatched_arg_count(
) -> Diagnostic {
let s = if d.expected == 1 { "" } else { "s" };
let message = format!("expected {} argument{s}, found {}", d.expected, d.found);
- Diagnostic::new("mismatched-arg-count", message, invalid_args_range(ctx, d))
+ Diagnostic::new(DiagnosticCode::RustcHardError("E0107"), message, invalid_args_range(ctx, d))
}
fn invalid_args_range(ctx: &DiagnosticsContext<'_>, d: &hir::MismatchedArgCount) -> TextRange {
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 60ccc41df..acc31cd11 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -15,7 +15,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, DiagnosticsContext};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: missing-fields
//
@@ -42,7 +42,7 @@ pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingField
.unwrap_or_else(|| d.field_list_parent.clone().either(|it| it.into(), |it| it.into())),
);
- Diagnostic::new("missing-fields", message, ctx.sema.diagnostics_display_range(ptr).range)
+ Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError("E0063"), message, ptr)
.with_fixes(fixes(ctx, d))
}
@@ -208,7 +208,7 @@ fn get_default_constructor(
}
let krate = ctx.sema.to_module_def(d.file.original_file(ctx.sema.db))?.krate();
- let module = krate.root_module(ctx.sema.db);
+ let module = krate.root_module();
// Look for a ::new() associated function
let has_new_func = ty
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 3f13b97a4..82a9a3bd5 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: missing-match-arm
//
@@ -7,10 +7,11 @@ pub(crate) fn missing_match_arms(
ctx: &DiagnosticsContext<'_>,
d: &hir::MissingMatchArms,
) -> Diagnostic {
- Diagnostic::new(
- "missing-match-arm",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0004"),
format!("missing match arm: {}", d.uncovered_patterns),
- ctx.sema.diagnostics_display_range(d.scrutinee_expr.clone().map(Into::into)).range,
+ d.scrutinee_expr.clone().map(Into::into),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 2026b6fce..70b26009b 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -4,16 +4,17 @@ use syntax::{ast, SyntaxNode};
use syntax::{match_ast, AstNode};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, DiagnosticsContext};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: missing-unsafe
//
// This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block.
pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Diagnostic {
- Diagnostic::new(
- "missing-unsafe",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0133"),
"this operation is unsafe and requires an unsafe function or block",
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 32e321107..3aa4aa970 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -1,14 +1,15 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
use hir::HirDisplay;
// Diagnostic: moved-out-of-ref
//
// This diagnostic is triggered on moving non-Copy things out of references.
pub(crate) fn moved_out_of_ref(ctx: &DiagnosticsContext<'_>, d: &hir::MovedOutOfRef) -> Diagnostic {
- Diagnostic::new(
- "moved-out-of-ref",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0507"),
format!("cannot move `{}` out of reference", d.ty.display(ctx.sema.db)),
- ctx.sema.diagnostics_display_range(d.span.clone()).range,
+ d.span.clone(),
)
 .experimental() // spans are broken, and I'm not sure how precisely we can detect Copy types
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index f61460e31..e0c3bedce 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -2,7 +2,7 @@ use ide_db::source_change::SourceChange;
use syntax::{AstNode, SyntaxKind, SyntaxNode, SyntaxToken, T};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, DiagnosticsContext, Severity};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: need-mut
//
@@ -29,13 +29,15 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Diagno
use_range,
)])
})();
- Diagnostic::new(
- "need-mut",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ // FIXME: `E0384` is not the only error that this diagnostic handles
+ DiagnosticCode::RustcHardError("E0384"),
format!(
"cannot mutate immutable variable `{}`",
d.local.name(ctx.sema.db).display(ctx.sema.db)
),
- ctx.sema.diagnostics_display_range(d.span.clone()).range,
+ d.span.clone(),
)
.with_fixes(fixes)
}
@@ -68,12 +70,12 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Di
)])
})();
let ast = d.local.primary_source(ctx.sema.db).syntax_ptr();
- Diagnostic::new(
- "unused-mut",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcLint("unused_mut"),
"variable does not need to be mutable",
- ctx.sema.diagnostics_display_range(ast).range,
+ ast,
)
- .severity(Severity::WeakWarning)
.experimental() // Not supporting `#[allow(unused_mut)]` leads to false positive.
.with_fixes(fixes)
}
@@ -93,7 +95,7 @@ mod tests {
fn f(_: i32) {}
fn main() {
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(x);
}
"#,
@@ -268,7 +270,7 @@ fn main() {
fn f(_: i32) {}
fn main() {
let mut x = (2, 7);
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(x.1);
}
"#,
@@ -302,7 +304,7 @@ fn main() {
r#"
fn main() {
let mut x = &mut 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
*x = 5;
}
"#,
@@ -346,7 +348,7 @@ fn main() {
r#"
//- minicore: copy, builtin_impls
fn clone(mut i: &!) -> ! {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
*i
}
"#,
@@ -360,7 +362,7 @@ fn main() {
//- minicore: option
fn main() {
let mut v = &mut Some(2);
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let _ = || match v {
Some(k) => {
*k = 5;
@@ -386,7 +388,7 @@ fn main() {
fn main() {
match (2, 3) {
(x, mut y) => {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x = 7;
//^^^^^ 💡 error: cannot mutate immutable variable `x`
}
@@ -407,7 +409,7 @@ fn main() {
fn main() {
return;
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
&mut x;
}
"#,
@@ -417,7 +419,7 @@ fn main() {
fn main() {
loop {}
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
&mut x;
}
"#,
@@ -438,7 +440,7 @@ fn main(b: bool) {
g();
}
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
&mut x;
}
"#,
@@ -452,7 +454,7 @@ fn main(b: bool) {
return;
}
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
&mut x;
}
"#,
@@ -466,7 +468,7 @@ fn main(b: bool) {
fn f(_: i32) {}
fn main() {
let mut x;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x = 5;
f(x);
}
@@ -477,7 +479,7 @@ fn main() {
fn f(_: i32) {}
fn main(b: bool) {
let mut x;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
if b {
x = 1;
} else {
@@ -552,15 +554,15 @@ fn f(_: i32) {}
fn main() {
loop {
let mut x = 1;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(x);
if let mut y = 2 {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(y);
}
match 3 {
mut z => f(z),
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
}
}
}
@@ -577,9 +579,9 @@ fn main() {
loop {
let c @ (
mut b,
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
mut d
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
);
a = 1;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
@@ -597,7 +599,7 @@ fn main() {
check_diagnostics(
r#"
fn f(mut x: i32) {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
}
"#,
);
@@ -640,7 +642,7 @@ fn f() {
//- minicore: iterators, copy
fn f(x: [(i32, u8); 10]) {
for (a, mut b) in x {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
}
@@ -657,9 +659,9 @@ fn f(x: [(i32, u8); 10]) {
fn f(x: [(i32, u8); 10]) {
let mut it = x.into_iter();
while let Some((a, mut b)) = it.next() {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
while let Some((c, mut d)) = it.next() {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
c = 2;
@@ -683,7 +685,7 @@ fn f() {
let x = &mut x;
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
let mut x = x;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x[2] = 5;
}
"#,
@@ -711,13 +713,13 @@ impl IndexMut<usize> for Foo {
}
fn f() {
let mut x = Foo;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let y = &x[2];
let x = Foo;
let y = &mut x[2];
//^💡 error: cannot mutate immutable variable `x`
let mut x = &mut Foo;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let y: &mut (i32, u8) = &mut x[2];
let x = Foo;
let ref mut y = x[7];
@@ -731,7 +733,7 @@ fn f() {
}
let mut x = Foo;
let mut i = 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let y = &mut x[i];
}
"#,
@@ -759,7 +761,7 @@ impl DerefMut for Foo {
}
fn f() {
let mut x = Foo;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let y = &*x;
let x = Foo;
let y = &mut *x;
@@ -790,11 +792,27 @@ fn f() {
fn f(_: i32) {}
fn main() {
let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7));
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
f(x);
}
"#,
);
+ check_diagnostics(
+ r#"
+struct Foo(i32);
+
+const X: Foo = Foo(5);
+const Y: Foo = Foo(12);
+
+const fn f(mut a: Foo) -> bool {
+ //^^^^^ 💡 warn: variable does not need to be mutable
+ match a {
+ X | Y => true,
+ _ => false,
+ }
+}
+"#,
+ );
}
#[test]
@@ -842,7 +860,7 @@ pub struct TreeLeaf {
pub fn test() {
let mut tree = Tree::Leaf(
- //^^^^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^^^^ 💡 warn: variable does not need to be mutable
TreeLeaf {
depth: 0,
data: 0
@@ -859,7 +877,7 @@ pub fn test() {
r#"
//- minicore: fn
fn fn_ref(mut x: impl Fn(u8) -> u8) -> u8 {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x(2)
}
fn fn_mut(x: impl FnMut(u8) -> u8) -> u8 {
@@ -867,11 +885,11 @@ fn fn_mut(x: impl FnMut(u8) -> u8) -> u8 {
//^ 💡 error: cannot mutate immutable variable `x`
}
fn fn_borrow_mut(mut x: &mut impl FnMut(u8) -> u8) -> u8 {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x(2)
}
fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
x(2)
}
"#,
@@ -915,14 +933,14 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
//- minicore: copy, fn
fn f() {
let mut x = 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let mut y = 2;
y = 7;
let closure = || {
let mut z = 8;
z = 3;
let mut k = z;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
};
}
"#,
@@ -949,7 +967,7 @@ fn f() {
fn f() {
struct X;
let mut x = X;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let c1 = || x;
let mut x = X;
let c2 = || { x = X; x };
@@ -965,12 +983,12 @@ fn f() {
fn f() {
let mut x = &mut 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let closure1 = || { *x = 2; };
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
let mut x = &mut 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let closure1 = || { *x = 2; &x; };
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
@@ -979,12 +997,12 @@ fn f() {
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
let mut x = &mut 5;
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let closure1 = move || { *x = 2; };
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
let mut x = &mut X(1, 2);
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
let closure1 = || { x.0 = 2; };
let _ = closure1();
//^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1`
@@ -1001,7 +1019,7 @@ fn f() {
fn x(t: &[u8]) {
match t {
&[a, mut b] | &[a, _, mut b] => {
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
@@ -1055,7 +1073,7 @@ fn f() {
*x = 7;
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
let mut y = Box::new(5);
- //^^^^^ 💡 weak: variable does not need to be mutable
+ //^^^^^ 💡 warn: variable does not need to be mutable
*x = *y;
//^^^^^^^ 💡 error: cannot mutate immutable variable `x`
let x = Box::new(5);
@@ -1067,6 +1085,33 @@ fn f() {
}
#[test]
+ fn regression_15143() {
+ check_diagnostics(
+ r#"
+ trait Tr {
+ type Ty;
+ }
+
+ struct A;
+
+ impl Tr for A {
+ type Ty = (u32, i64);
+ }
+
+ struct B<T: Tr> {
+ f: <T as Tr>::Ty,
+ }
+
+ fn main(b: B<A>) {
+ let f = b.f.0;
+ f = 5;
+ //^^^^^ 💡 error: cannot mutate immutable variable `f`
+ }
+ "#,
+ );
+ }
+
+ #[test]
fn allow_unused_mut_for_identifiers_starting_with_underline() {
check_diagnostics(
r#"
@@ -1080,17 +1125,51 @@ fn main() {
}
#[test]
- fn respect_allow_unused_mut() {
- // FIXME: respect
+ fn respect_lint_attributes_for_unused_mut() {
check_diagnostics(
r#"
fn f(_: i32) {}
fn main() {
#[allow(unused_mut)]
let mut x = 2;
- //^^^^^ 💡 weak: variable does not need to be mutable
f(x);
}
+
+fn main2() {
+ #[deny(unused_mut)]
+ let mut x = 2;
+ //^^^^^ 💡 error: variable does not need to be mutable
+ f(x);
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+macro_rules! mac {
+ ($($x:expr),*$(,)*) => ({
+ #[allow(unused_mut)]
+ let mut vec = 2;
+ vec
+ });
+}
+
+fn main2() {
+ let mut x = mac![];
+ //^^^^^ 💡 warn: variable does not need to be mutable
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn regression_15099() {
+ check_diagnostics(
+ r#"
+//- minicore: iterator, range
+fn f() {
+ loop {}
+ for _ in 0..2 {}
+}
"#,
);
}
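
The renamed respect_lint_attributes_for_unused_mut test above makes the same point for unused_mut: #[allow] now silences the diagnostic and #[deny] promotes it to an error, including through the macro expansion in the second fixture. For reference, the plain rustc behaviour it mirrors (nothing here is rust-analyzer API):

fn consume(_: i32) {}

fn main() {
    // Allowed: matches the first fixture above, where no diagnostic is expected.
    #[allow(unused_mut)]
    let mut a = 1;
    consume(a);

    // Default level: rustc warns "variable does not need to be mutable", and the
    // diagnostic above is now rendered at "warn".
    let mut b = 2;
    consume(b);
}
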
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
index a39eceab2..a34a5824f 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -6,16 +6,17 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Assist, Diagnostic, DiagnosticsContext};
+use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: no-such-field
//
// This diagnostic is triggered if the created structure does not have the field provided in the record.
pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
- Diagnostic::new(
- "no-such-field",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0559"),
"no such field",
- ctx.sema.diagnostics_display_range(d.field.clone().map(|it| it.into())).range,
+ d.field.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
index 4cd85a479..c44d28e77 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
@@ -1,6 +1,6 @@
use either::Either;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: private-assoc-item
//
@@ -16,8 +16,9 @@ pub(crate) fn private_assoc_item(
.name(ctx.sema.db)
.map(|name| format!("`{}` ", name.display(ctx.sema.db)))
.unwrap_or_default();
- Diagnostic::new(
- "private-assoc-item",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0624"),
format!(
"{} {}is private",
match d.item {
@@ -27,15 +28,13 @@ pub(crate) fn private_assoc_item(
},
name,
),
- ctx.sema
- .diagnostics_display_range(d.expr_or_pat.clone().map(|it| match it {
+ d.expr_or_pat.clone().map(|it| match it {
+ Either::Left(it) => it.into(),
+ Either::Right(it) => match it {
Either::Left(it) => it.into(),
- Either::Right(it) => match it {
- Either::Left(it) => it.into(),
- Either::Right(it) => it.into(),
- },
- }))
- .range,
+ Either::Right(it) => it.into(),
+ },
+ }),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
index de7f51f69..553defcf9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs
@@ -1,18 +1,19 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: private-field
//
// This diagnostic is triggered if the accessed field is not visible from the current module.
pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField) -> Diagnostic {
// FIXME: add quickfix
- Diagnostic::new(
- "private-field",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0616"),
format!(
"field `{}` of `{}` is private",
d.field.name(ctx.sema.db).display(ctx.sema.db),
d.field.parent_def(ctx.sema.db).name(ctx.sema.db).display(ctx.sema.db)
),
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index d3eda3c5e..083ef3e8d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -6,7 +6,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
+use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: replace-filter-map-next-with-find-map
//
@@ -15,12 +15,12 @@ pub(crate) fn replace_filter_map_next_with_find_map(
ctx: &DiagnosticsContext<'_>,
d: &hir::ReplaceFilterMapNextWithFindMap,
) -> Diagnostic {
- Diagnostic::new(
- "replace-filter-map-next-with-find-map",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::Clippy("filter_map_next"),
"replace filter_map(..).next() with find_map(..)",
- ctx.sema.diagnostics_display_range(InFile::new(d.file, d.next_expr.clone().into())).range,
+ InFile::new(d.file, d.next_expr.clone().into()),
)
- .severity(Severity::WeakWarning)
.with_fixes(fixes(ctx, d))
}
@@ -64,7 +64,7 @@ mod tests {
pub(crate) fn check_diagnostics(ra_fixture: &str) {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("inactive-code".to_string());
- config.disabled.insert("unresolved-method".to_string());
+ config.disabled.insert("E0599".to_string());
check_diagnostics_with_config(config, ra_fixture)
}
@@ -139,4 +139,33 @@ fn foo() {
"#,
)
}
+
+ #[test]
+ fn respect_lint_attributes_for_clippy_equivalent() {
+ check_diagnostics(
+ r#"
+//- minicore: iterators
+
+fn foo() {
+ #[allow(clippy::filter_map_next)]
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+}
+
+#[deny(clippy::filter_map_next)]
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..)
+
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
+
+#[warn(clippy::filter_map_next)]
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..)
+
+"#,
+ );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index c28f98d83..15bd28c00 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -7,7 +7,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticsContext};
+use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: type-mismatch
//
@@ -39,7 +39,7 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
}
};
let mut diag = Diagnostic::new(
- "type-mismatch",
+ DiagnosticCode::RustcHardError("E0308"),
format!(
"expected {}, found {}",
d.expected.display(ctx.sema.db).with_closure_style(ClosureStyle::ClosureWithId),
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
index e12bbcf68..4af672271 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -7,7 +7,7 @@ use ide_db::{
use syntax::AstNode;
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: typed-hole
//
@@ -26,7 +26,8 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
)
};
- Diagnostic::new("typed-hole", message, display_range.range).with_fixes(fixes)
+ Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range.range)
+ .with_fixes(fixes)
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs
index 034e4fcfb..7de9a9a32 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: undeclared-label
pub(crate) fn undeclared_label(
@@ -6,10 +6,11 @@ pub(crate) fn undeclared_label(
d: &hir::UndeclaredLabel,
) -> Diagnostic {
let name = &d.name;
- Diagnostic::new(
- "undeclared-label",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("undeclared-label"),
format!("use of undeclared label `{}`", name.display(ctx.sema.db)),
- ctx.sema.diagnostics_display_range(d.node.clone().map(|it| it.into())).range,
+ d.node.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
index e879de75c..bcce72a7d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: unimplemented-builtin-macro
//
@@ -7,10 +7,10 @@ pub(crate) fn unimplemented_builtin_macro(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnimplementedBuiltinMacro,
) -> Diagnostic {
- Diagnostic::new(
- "unimplemented-builtin-macro",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::Ra("unimplemented-builtin-macro", Severity::WeakWarning),
"unimplemented built-in macro".to_string(),
- ctx.sema.diagnostics_display_range(d.node.clone()).range,
+ d.node.clone(),
)
- .severity(Severity::WeakWarning)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index 271e7ce73..e04f27c27 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -14,7 +14,7 @@ use syntax::{
};
use text_edit::TextEdit;
-use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity};
+use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: unlinked-file
//
@@ -46,8 +46,7 @@ pub(crate) fn unlinked_file(
.unwrap_or(range);
acc.push(
- Diagnostic::new("unlinked-file", message, range)
- .severity(Severity::WeakWarning)
+ Diagnostic::new(DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), message, range)
.with_fixes(fixes),
);
}
@@ -119,10 +118,11 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
stack.pop();
'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
let crate_def_map = ctx.sema.db.crate_def_map(krate);
- let Some((_, module)) =
- crate_def_map.modules()
- .find(|(_, module)| module.origin.file_id() == Some(parent_id) && !module.origin.is_inline())
- else { continue };
+ let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
+ module.origin.file_id() == Some(parent_id) && !module.origin.is_inline()
+ }) else {
+ continue;
+ };
if stack.is_empty() {
return make_fixes(
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs
index 9fedadeae..1c5d6cd09 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unreachable-label
pub(crate) fn unreachable_label(
@@ -6,10 +6,11 @@ pub(crate) fn unreachable_label(
d: &hir::UnreachableLabel,
) -> Diagnostic {
let name = &d.name;
- Diagnostic::new(
- "unreachable-label",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0767"),
format!("use of unreachable label `{}`", name.display(ctx.sema.db)),
- ctx.sema.diagnostics_display_range(d.node.clone().map(|it| it.into())).range,
+ d.node.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
index 74e4a69c6..f8265b632 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-extern-crate
//
@@ -7,10 +7,11 @@ pub(crate) fn unresolved_extern_crate(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedExternCrate,
) -> Diagnostic {
- Diagnostic::new(
- "unresolved-extern-crate",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("unresolved-extern-crate"),
"unresolved extern crate",
- ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ d.decl.clone().map(|it| it.into()),
)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index 5e4efa41f..0758706e4 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, AstPtr};
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-field
//
@@ -22,14 +22,15 @@ pub(crate) fn unresolved_field(
} else {
""
};
- Diagnostic::new(
- "unresolved-field",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0559"),
format!(
"no field `{}` on type `{}`{method_suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
.experimental()
@@ -67,7 +68,10 @@ fn method_fix(
}
#[cfg(test)]
mod tests {
- use crate::tests::check_diagnostics;
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config},
+ DiagnosticsConfig,
+ };
#[test]
fn smoke_test() {
@@ -145,4 +149,11 @@ fn foo() {
"#,
);
}
+
+ #[test]
+ fn no_diagnostic_for_missing_name() {
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("syntax-error".to_owned());
+ check_diagnostics_with_config(config, "fn foo() { (). }");
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
index e52a88459..6b8026c03 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-import
//
@@ -8,10 +8,11 @@ pub(crate) fn unresolved_import(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedImport,
) -> Diagnostic {
- Diagnostic::new(
- "unresolved-import",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0432"),
"unresolved import",
- ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ d.decl.clone().map(|it| it.into()),
)
// This currently results in false positives in the following cases:
// - `cfg_if!`-generated code in libstd (we don't load the sysroot correctly)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index 3943b51ab..33e7c2e37 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -1,4 +1,4 @@
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-macro-call
//
@@ -12,7 +12,7 @@ pub(crate) fn unresolved_macro_call(
let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location);
let bang = if d.is_bang { "!" } else { "" };
Diagnostic::new(
- "unresolved-macro-call",
+ DiagnosticCode::RustcHardError("unresolved-macro-call"),
format!("unresolved macro `{}{bang}`", d.path.display(ctx.sema.db)),
display_range,
)
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 8bbb837e6..ae9f6744c 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -8,7 +8,7 @@ use ide_db::{
use syntax::{ast, AstNode, TextRange};
use text_edit::TextEdit;
-use crate::{Diagnostic, DiagnosticsContext};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-method
//
@@ -22,14 +22,15 @@ pub(crate) fn unresolved_method(
} else {
""
};
- Diagnostic::new(
- "unresolved-method",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0599"),
format!(
"no method `{}` on type `{}`{field_suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
- ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range,
+ d.expr.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
.experimental()
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index 6e3fd3b42..be24e50c9 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -3,7 +3,7 @@ use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSyste
use itertools::Itertools;
use syntax::AstNode;
-use crate::{fix, Diagnostic, DiagnosticsContext};
+use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unresolved-module
//
@@ -12,8 +12,9 @@ pub(crate) fn unresolved_module(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedModule,
) -> Diagnostic {
- Diagnostic::new(
- "unresolved-module",
+ Diagnostic::new_with_syntax_node_ptr(
+ ctx,
+ DiagnosticCode::RustcHardError("E0583"),
match &*d.candidates {
[] => "unresolved module".to_string(),
[candidate] => format!("unresolved module, can't find module file: {candidate}"),
@@ -25,7 +26,7 @@ pub(crate) fn unresolved_module(
)
}
},
- ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range,
+ d.decl.clone().map(|it| it.into()),
)
.with_fixes(fixes(ctx, d))
}
@@ -82,8 +83,8 @@ mod baz {}
expect![[r#"
[
Diagnostic {
- code: DiagnosticCode(
- "unresolved-module",
+ code: RustcHardError(
+ "E0583",
),
message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs",
range: 0..8,
@@ -148,6 +149,22 @@ mod baz {}
},
],
),
+ main_node: Some(
+ InFile {
+ file_id: FileId(
+ FileId(
+ 0,
+ ),
+ ),
+ value: MODULE@0..8
+ MOD_KW@0..3 "mod"
+ WHITESPACE@3..4 " "
+ NAME@4..7
+ IDENT@4..7 "foo"
+ SEMICOLON@7..8 ";"
+ ,
+ },
+ ),
},
]
"#]],
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
index ae5cf1358..015a3d6b2 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
@@ -1,6 +1,6 @@
use hir::db::DefDatabase;
-use crate::{Diagnostic, DiagnosticsContext, Severity};
+use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity};
// Diagnostic: unresolved-proc-macro
//
@@ -41,5 +41,5 @@ pub(crate) fn unresolved_proc_macro(
};
let message = format!("{not_expanded_message}: {message}");
- Diagnostic::new("unresolved-proc-macro", message, display_range).severity(severity)
+ Diagnostic::new(DiagnosticCode::Ra("unresolved-proc-macro", severity), message, display_range)
}
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
index 289ed0458..0aa439f79 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs
@@ -1,9 +1,9 @@
use ide_db::{base_db::FileId, source_change::SourceChange};
use itertools::Itertools;
-use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use syntax::{ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
-use crate::{fix, Diagnostic, Severity};
+use crate::{fix, Diagnostic, DiagnosticCode};
// Diagnostic: unnecessary-braces
//
@@ -15,6 +15,11 @@ pub(crate) fn useless_braces(
) -> Option<()> {
let use_tree_list = ast::UseTreeList::cast(node.clone())?;
if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
+ // If there is a `self` inside the bracketed `use`, don't show diagnostic.
+ if single_use_tree.path()?.segment()?.self_token().is_some() {
+ return Some(());
+ }
+
// If there is a comment inside the bracketed `use`,
// assume it is a commented out module path and don't show diagnostic.
if use_tree_list.has_inner_comment() {
@@ -22,21 +27,18 @@ pub(crate) fn useless_braces(
}
let use_range = use_tree_list.syntax().text_range();
- let edit = remove_braces(&single_use_tree).unwrap_or_else(|| {
- let to_replace = single_use_tree.syntax().text().to_string();
- let mut edit_builder = TextEdit::builder();
- edit_builder.delete(use_range);
- edit_builder.insert(use_range.start(), to_replace);
- edit_builder.finish()
- });
+ let to_replace = single_use_tree.syntax().text().to_string();
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(use_range);
+ edit_builder.insert(use_range.start(), to_replace);
+ let edit = edit_builder.finish();
acc.push(
Diagnostic::new(
- "unnecessary-braces",
+ DiagnosticCode::RustcLint("unused_braces"),
"Unnecessary braces in use statement".to_string(),
use_range,
)
- .severity(Severity::WeakWarning)
.with_fixes(Some(vec![fix(
"remove_braces",
"Remove unnecessary braces",
@@ -49,19 +51,12 @@ pub(crate) fn useless_braces(
Some(())
}
-fn remove_braces(single_use_tree: &ast::UseTree) -> Option<TextEdit> {
- let use_tree_list_node = single_use_tree.syntax().parent()?;
- if single_use_tree.path()?.segment()?.self_token().is_some() {
- let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
- let end = use_tree_list_node.text_range().end();
- return Some(TextEdit::delete(TextRange::new(start, end)));
- }
- None
-}
-
#[cfg(test)]
mod tests {
- use crate::tests::{check_diagnostics, check_fix};
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config, check_fix},
+ DiagnosticsConfig,
+ };
#[test]
fn test_check_unnecessary_braces_in_use_statement() {
@@ -94,6 +89,32 @@ mod a {
}
"#,
);
+ check_diagnostics(
+ r#"
+use a::{self};
+
+mod a {
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+use a::{self as cool_name};
+
+mod a {
+}
+"#,
+ );
+
+ let mut config = DiagnosticsConfig::test_sample();
+ config.disabled.insert("syntax-error".to_string());
+ check_diagnostics_with_config(
+ config,
+ r#"
+mod a { pub mod b {} }
+use a::{b::self};
+"#,
+ );
check_fix(
r#"
mod b {}
@@ -126,16 +147,6 @@ use a::c;
);
check_fix(
r#"
-mod a {}
-use a::{self$0};
-"#,
- r#"
-mod a {}
-use a;
-"#,
- );
- check_fix(
- r#"
mod a { pub mod c {} pub mod d { pub mod e {} } }
use a::{c, d::{e$0}};
"#,
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index 55a4a482d..b1b9b4b8e 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -67,24 +67,61 @@ mod handlers {
#[cfg(test)]
mod tests;
+use std::collections::HashMap;
+
use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
use ide_db::{
assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
base_db::{FileId, FileRange, SourceDatabase},
+ generated::lints::{LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS},
imports::insert_use::InsertUseConfig,
label::Label,
source_change::SourceChange,
- FxHashSet, RootDatabase,
+ syntax_helpers::node_ext::parse_tt_as_comma_sep_paths,
+ FxHashMap, FxHashSet, RootDatabase,
+};
+use once_cell::sync::Lazy;
+use stdx::never;
+use syntax::{
+ algo::find_node_at_range,
+ ast::{self, AstNode},
+ SyntaxNode, SyntaxNodePtr, TextRange,
};
-use syntax::{algo::find_node_at_range, ast::AstNode, SyntaxNodePtr, TextRange};
// FIXME: Make this an enum
-#[derive(Copy, Clone, Debug, PartialEq)]
-pub struct DiagnosticCode(pub &'static str);
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum DiagnosticCode {
+ RustcHardError(&'static str),
+ RustcLint(&'static str),
+ Clippy(&'static str),
+ Ra(&'static str, Severity),
+}
impl DiagnosticCode {
- pub fn as_str(&self) -> &str {
- self.0
+ pub fn url(&self) -> String {
+ match self {
+ DiagnosticCode::RustcHardError(e) => {
+ format!("https://doc.rust-lang.org/stable/error_codes/{e}.html")
+ }
+ DiagnosticCode::RustcLint(e) => {
+ format!("https://doc.rust-lang.org/rustc/?search={e}")
+ }
+ DiagnosticCode::Clippy(e) => {
+ format!("https://rust-lang.github.io/rust-clippy/master/#/{e}")
+ }
+ DiagnosticCode::Ra(e, _) => {
+ format!("https://rust-analyzer.github.io/manual.html#{e}")
+ }
+ }
+ }
+
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ DiagnosticCode::RustcHardError(r)
+ | DiagnosticCode::RustcLint(r)
+ | DiagnosticCode::Clippy(r)
+ | DiagnosticCode::Ra(r, _) => r,
+ }
}
}
@@ -97,29 +134,51 @@ pub struct Diagnostic {
pub unused: bool,
pub experimental: bool,
pub fixes: Option<Vec<Assist>>,
+ // The node that will be affected by `#[allow]` and similar attributes.
+ pub main_node: Option<InFile<SyntaxNode>>,
}
impl Diagnostic {
- fn new(code: &'static str, message: impl Into<String>, range: TextRange) -> Diagnostic {
+ fn new(code: DiagnosticCode, message: impl Into<String>, range: TextRange) -> Diagnostic {
let message = message.into();
Diagnostic {
- code: DiagnosticCode(code),
+ code,
message,
range,
- severity: Severity::Error,
+ severity: match code {
+ DiagnosticCode::RustcHardError(_) => Severity::Error,
+                // FIXME: Rustc lints are not always warnings, but the ones that are currently implemented are all warnings.
+ DiagnosticCode::RustcLint(_) => Severity::Warning,
+ // FIXME: We can make this configurable, and if the user uses `cargo clippy` on flycheck, we can
+                // make it a normal warning.
+ DiagnosticCode::Clippy(_) => Severity::WeakWarning,
+ DiagnosticCode::Ra(_, s) => s,
+ },
unused: false,
experimental: false,
fixes: None,
+ main_node: None,
}
}
+ fn new_with_syntax_node_ptr(
+ ctx: &DiagnosticsContext<'_>,
+ code: DiagnosticCode,
+ message: impl Into<String>,
+ node: InFile<SyntaxNodePtr>,
+ ) -> Diagnostic {
+ let file_id = node.file_id;
+ Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()).range)
+ .with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id))))
+ }
+
fn experimental(mut self) -> Diagnostic {
self.experimental = true;
self
}
- fn severity(mut self, severity: Severity) -> Diagnostic {
- self.severity = severity;
+ fn with_main_node(mut self, main_node: InFile<SyntaxNode>) -> Diagnostic {
+ self.main_node = Some(main_node);
self
}
@@ -134,12 +193,12 @@ impl Diagnostic {
}
}
-#[derive(Debug, Copy, Clone)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Severity {
Error,
- // We don't actually emit this one yet, but we should at some point.
- // Warning,
+ Warning,
WeakWarning,
+ Allow,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -155,6 +214,8 @@ impl Default for ExprFillDefaultMode {
#[derive(Debug, Clone)]
pub struct DiagnosticsConfig {
+ /// Whether native diagnostics are enabled.
+ pub enabled: bool,
pub proc_macros_enabled: bool,
pub proc_attr_macros_enabled: bool,
pub disable_experimental: bool,
@@ -171,6 +232,7 @@ impl DiagnosticsConfig {
use ide_db::imports::insert_use::ImportGranularity;
Self {
+ enabled: true,
proc_macros_enabled: Default::default(),
proc_attr_macros_enabled: Default::default(),
disable_experimental: Default::default(),
@@ -194,7 +256,7 @@ struct DiagnosticsContext<'a> {
resolve: &'a AssistResolveStrategy,
}
-impl<'a> DiagnosticsContext<'a> {
+impl DiagnosticsContext<'_> {
fn resolve_precise_location(
&self,
node: &InFile<SyntaxNodePtr>,
@@ -228,11 +290,13 @@ pub fn diagnostics(
let mut res = Vec::new();
// [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
- res.extend(
- parse.errors().iter().take(128).map(|err| {
- Diagnostic::new("syntax-error", format!("Syntax Error: {err}"), err.range())
- }),
- );
+ res.extend(parse.errors().iter().take(128).map(|err| {
+ Diagnostic::new(
+ DiagnosticCode::RustcHardError("syntax-error"),
+ format!("Syntax Error: {err}"),
+ err.range(),
+ )
+ }));
let parse = sema.parse(file_id);
@@ -271,7 +335,7 @@ pub fn diagnostics(
res.extend(d.errors.iter().take(32).map(|err| {
{
Diagnostic::new(
- "syntax-error",
+ DiagnosticCode::RustcHardError("syntax-error"),
format!("Syntax Error in Expansion: {err}"),
ctx.resolve_precise_location(&d.node.clone(), d.precise_location),
)
@@ -309,14 +373,168 @@ pub fn diagnostics(
res.push(d)
}
+ let mut diagnostics_of_range =
+ res.iter_mut().filter_map(|x| Some((x.main_node.clone()?, x))).collect::<FxHashMap<_, _>>();
+
+ let mut rustc_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
+ let mut clippy_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default();
+
+ handle_lint_attributes(
+ &ctx.sema,
+ parse.syntax(),
+ &mut rustc_stack,
+ &mut clippy_stack,
+ &mut diagnostics_of_range,
+ );
+
res.retain(|d| {
- !ctx.config.disabled.contains(d.code.as_str())
+ d.severity != Severity::Allow
+ && !ctx.config.disabled.contains(d.code.as_str())
&& !(ctx.config.disable_experimental && d.experimental)
});
res
}
+// `__RA_EVERY_LINT` is a fake lint group used to allow every lint inside proc-macro expansions
+
+static RUSTC_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> =
+ Lazy::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], ""));
+
+static CLIPPY_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> =
+ Lazy::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::"));
+
+fn build_group_dict(
+ lint_group: &'static [LintGroup],
+ all_groups: &'static [&'static str],
+ prefix: &'static str,
+) -> HashMap<&'static str, Vec<&'static str>> {
+ let mut r: HashMap<&str, Vec<&str>> = HashMap::new();
+ for g in lint_group {
+ for child in g.children {
+ r.entry(child.strip_prefix(prefix).unwrap())
+ .or_default()
+ .push(g.lint.label.strip_prefix(prefix).unwrap());
+ }
+ }
+ for (lint, groups) in r.iter_mut() {
+ groups.push(lint);
+ groups.extend_from_slice(all_groups);
+ }
+ r
+}
+
+fn handle_lint_attributes(
+ sema: &Semantics<'_, RootDatabase>,
+ root: &SyntaxNode,
+ rustc_stack: &mut FxHashMap<String, Vec<Severity>>,
+ clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
+ diagnostics_of_range: &mut FxHashMap<InFile<SyntaxNode>, &mut Diagnostic>,
+) {
+ let file_id = sema.hir_file_for(root);
+ for ev in root.preorder() {
+ match ev {
+ syntax::WalkEvent::Enter(node) => {
+ for attr in node.children().filter_map(ast::Attr::cast) {
+ parse_lint_attribute(attr, rustc_stack, clippy_stack, |stack, severity| {
+ stack.push(severity);
+ });
+ }
+ if let Some(x) =
+ diagnostics_of_range.get_mut(&InFile { file_id, value: node.clone() })
+ {
+ const EMPTY_LINTS: &[&str] = &[];
+ let (names, stack) = match x.code {
+ DiagnosticCode::RustcLint(name) => (
+ RUSTC_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |x| &**x),
+ &mut *rustc_stack,
+ ),
+ DiagnosticCode::Clippy(name) => (
+ CLIPPY_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |x| &**x),
+ &mut *clippy_stack,
+ ),
+ _ => continue,
+ };
+ for &name in names {
+ if let Some(s) = stack.get(name).and_then(|x| x.last()) {
+ x.severity = *s;
+ }
+ }
+ }
+ if let Some(item) = ast::Item::cast(node.clone()) {
+ if let Some(me) = sema.expand_attr_macro(&item) {
+ for stack in [&mut *rustc_stack, &mut *clippy_stack] {
+ stack
+ .entry("__RA_EVERY_LINT".to_owned())
+ .or_default()
+ .push(Severity::Allow);
+ }
+ handle_lint_attributes(
+ sema,
+ &me,
+ rustc_stack,
+ clippy_stack,
+ diagnostics_of_range,
+ );
+ for stack in [&mut *rustc_stack, &mut *clippy_stack] {
+ stack.entry("__RA_EVERY_LINT".to_owned()).or_default().pop();
+ }
+ }
+ }
+ if let Some(mc) = ast::MacroCall::cast(node) {
+ if let Some(me) = sema.expand(&mc) {
+ handle_lint_attributes(
+ sema,
+ &me,
+ rustc_stack,
+ clippy_stack,
+ diagnostics_of_range,
+ );
+ }
+ }
+ }
+ syntax::WalkEvent::Leave(node) => {
+ for attr in node.children().filter_map(ast::Attr::cast) {
+ parse_lint_attribute(attr, rustc_stack, clippy_stack, |stack, severity| {
+ if stack.pop() != Some(severity) {
+ never!("Mismatched serevity in walking lint attributes");
+ }
+ });
+ }
+ }
+ }
+ }
+}
+
+fn parse_lint_attribute(
+ attr: ast::Attr,
+ rustc_stack: &mut FxHashMap<String, Vec<Severity>>,
+ clippy_stack: &mut FxHashMap<String, Vec<Severity>>,
+ job: impl Fn(&mut Vec<Severity>, Severity),
+) {
+ let Some((tag, args_tt)) = attr.as_simple_call() else {
+ return;
+ };
+    let severity = match tag.as_str() {
+ "allow" => Severity::Allow,
+ "warn" => Severity::Warning,
+ "forbid" | "deny" => Severity::Error,
+ _ => return,
+ };
+ for lint in parse_tt_as_comma_sep_paths(args_tt).into_iter().flatten() {
+ if let Some(lint) = lint.as_single_name_ref() {
+            job(rustc_stack.entry(lint.to_string()).or_default(), severity);
+ }
+ if let Some(tool) = lint.qualifier().and_then(|x| x.as_single_name_ref()) {
+ if let Some(name_ref) = &lint.segment().and_then(|x| x.name_ref()) {
+ if tool.to_string() == "clippy" {
+                    job(clippy_stack.entry(name_ref.to_string()).or_default(), severity);
+ }
+ }
+ }
+ }
+}
+
fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist {
let mut res = unresolved_fix(id, label, target);
res.source_change = Some(source_change);
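
Taken together, the lib.rs changes above give every diagnostic a typed code (rustc hard error, rustc lint, clippy lint, or a rust-analyzer-specific code carrying its own severity) and walk `#[allow]`/`#[warn]`/`#[deny]`/`#[forbid]` attributes, expanding lint groups through the generated group dictionaries, for diagnostics that record a `main_node`. A hedged sketch of the attribute forms the new walker recognizes (illustrative user code; the function name is made up for the example):

    #[allow(dead_code)]               // pushes Severity::Allow for `dead_code` on the rustc stack
    #[warn(unused_variables)]         // pushes Severity::Warning
    #[deny(unsafe_code)]              // `deny` and `forbid` both push Severity::Error
    #[allow(clippy::needless_return)] // `clippy::`-qualified paths go to the clippy stack
    #[allow(unused)]                  // lint groups are expanded via the group dictionaries
    fn attribute_forms_example() {}
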
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
index b5cd4e0d6..ee0e03549 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -49,8 +49,11 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
let file_id = *source_change.source_file_edits.keys().next().unwrap();
let mut actual = db.file_text(file_id).to_string();
- for edit in source_change.source_file_edits.values() {
+ for (edit, snippet_edit) in source_change.source_file_edits.values() {
edit.apply(&mut actual);
+ if let Some(snippet_edit) = snippet_edit {
+ snippet_edit.apply(&mut actual);
+ }
}
actual
};
@@ -114,6 +117,8 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
annotation.push_str(match d.severity {
Severity::Error => "error",
Severity::WeakWarning => "weak",
+ Severity::Warning => "warn",
+ Severity::Allow => "allow",
});
annotation.push_str(": ");
annotation.push_str(&d.message);
@@ -130,14 +135,19 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
)
}
}
- assert_eq!(expected, actual);
+ if expected != actual {
+ let fneg = expected.iter().filter(|x| !actual.contains(x)).collect::<Vec<_>>();
+ let fpos = actual.iter().filter(|x| !expected.contains(x)).collect::<Vec<_>>();
+
+ panic!("Diagnostic test failed.\nFalse negatives: {fneg:?}\nFalse positives: {fpos:?}");
+ }
}
}
#[test]
fn test_disabled_diagnostics() {
let mut config = DiagnosticsConfig::test_sample();
- config.disabled.insert("unresolved-module".into());
+ config.disabled.insert("E0583".into());
let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
@@ -159,7 +169,7 @@ fn minicore_smoke_test() {
let source = minicore.source_code();
let mut config = DiagnosticsConfig::test_sample();
// This should be ignored since we conditionally remove code which creates single item use with braces
- config.disabled.insert("unnecessary-braces".to_string());
+ config.disabled.insert("unused_braces".to_string());
check_diagnostics_with_config(config, &source);
}
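
As these test updates show, diagnostics are now disabled by their migrated codes (rustc error codes and lint names) rather than by the old rust-analyzer identifiers. A usage sketch of the same pattern, relying on the crate-internal test helpers shown above (assumed to be in scope; not a new API):

    let mut config = DiagnosticsConfig::test_sample();
    config.disabled.insert("E0583".to_owned());         // previously "unresolved-module"
    config.disabled.insert("unused_braces".to_owned()); // previously "unnecessary-braces"
    check_diagnostics_with_config(config, r#"mod nonexistent;"#);
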
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
index 0a85569b6..ca76d0a87 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -22,7 +22,7 @@ pub(crate) struct UsageCache {
usages: Vec<(Definition, UsageSearchResult)>,
}
-impl<'db> MatchFinder<'db> {
+impl MatchFinder<'_> {
/// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
/// replacement impossible, so further processing is required in order to properly nest matches
/// and remove overlapping matches. This is done in the `nesting` module.
@@ -121,7 +121,7 @@ impl<'db> MatchFinder<'db> {
// cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
// lookups in the case of a cache hit.
if usage_cache.find(&definition).is_none() {
- let usages = definition.usages(&self.sema).in_scope(self.search_scope()).all();
+ let usages = definition.usages(&self.sema).in_scope(&self.search_scope()).all();
usage_cache.usages.push((definition, usages));
return &usage_cache.usages.last().unwrap().1;
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index 8112c4f72..d240127f3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -153,6 +153,9 @@ pub(crate) fn external_docs(
NameRefClass::FieldShorthand { local_ref: _, field_ref } => {
Definition::Field(field_ref)
}
+ NameRefClass::ExternCrateShorthand { decl, .. } => {
+ Definition::ExternCrateDecl(decl)
+ }
},
ast::Name(name) => match NameClass::classify(sema, &name)? {
NameClass::Definition(it) | NameClass::ConstReference(it) => it,
@@ -209,6 +212,7 @@ pub(crate) fn resolve_doc_path_for_def(
Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
Definition::Field(it) => it.resolve_doc_path(db, link, ns),
Definition::SelfType(it) => it.resolve_doc_path(db, link, ns),
+ Definition::ExternCrateDecl(it) => it.resolve_doc_path(db, link, ns),
Definition::BuiltinAttr(_)
| Definition::ToolModule(_)
| Definition::BuiltinType(_)
@@ -330,7 +334,9 @@ fn get_doc_links(
base_url.and_then(|url| url.join(path).ok())
};
- let Some((target, file, frag)) = filename_and_frag_for_def(db, def) else { return Default::default(); };
+ let Some((target, file, frag)) = filename_and_frag_for_def(db, def) else {
+ return Default::default();
+ };
let (mut web_url, mut local_url) = get_doc_base_urls(db, target, target_dir, sysroot);
@@ -615,6 +621,9 @@ fn filename_and_frag_for_def(
// FIXME fragment numbering
return Some((adt, file, Some(String::from("impl"))));
}
+ Definition::ExternCrateDecl(it) => {
+ format!("{}/index.html", it.name(db).display(db.upcast()))
+ }
Definition::Local(_)
| Definition::GenericParam(_)
| Definition::Label(_)
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
index e70bc2ec5..c39c696cf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -37,11 +37,15 @@ pub(crate) fn goto_declaration(
match parent {
ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? {
NameRefClass::Definition(it) => Some(it),
- NameRefClass::FieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db),
+ NameRefClass::FieldShorthand { field_ref, .. } =>
+ return field_ref.try_to_nav(db),
+ NameRefClass::ExternCrateShorthand { decl, .. } =>
+ return decl.try_to_nav(db),
},
ast::Name(name) => match NameClass::classify(&sema, &name)? {
NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it),
- NameClass::PatFieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db),
+ NameClass::PatFieldShorthand { field_ref, .. } =>
+ return field_ref.try_to_nav(db),
},
_ => None
}
@@ -53,6 +57,7 @@ pub(crate) fn goto_declaration(
Definition::Const(c) => c.as_assoc_item(db),
Definition::TypeAlias(ta) => ta.as_assoc_item(db),
Definition::Function(f) => f.as_assoc_item(db),
+ Definition::ExternCrateDecl(it) => return it.try_to_nav(db),
_ => None,
}?;
@@ -211,4 +216,30 @@ fn main() {
"#,
);
}
+
+ #[test]
+ fn goto_decl_for_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std$0;
+ /// ^^^
+//- /std/lib.rs crate:std
+// empty
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_decl_for_renamed_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as abc$0;
+ /// ^^^
+//- /std/lib.rs crate:std
+// empty
+"#,
+ )
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index 4e641357e..21471ab2a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -1,6 +1,9 @@
use std::mem::discriminant;
-use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+use crate::{
+ doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
+ RangeInfo, TryToNav,
+};
use hir::{AsAssocItem, AssocItem, Semantics};
use ide_db::{
base_db::{AnchoredPath, FileId, FileLoader},
@@ -73,6 +76,13 @@ pub(crate) fn goto_definition(
.definitions()
.into_iter()
.flat_map(|def| {
+ if let Definition::ExternCrateDecl(crate_def) = def {
+ return crate_def
+ .resolved_crate(db)
+ .map(|it| it.root_module().to_nav(sema.db))
+ .into_iter()
+ .collect();
+ }
try_filter_trait_item_definition(sema, &def)
.unwrap_or_else(|| def_to_nav(sema.db, def))
})
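
The goto_definition change above special-cases `extern crate` declarations: the declaration is resolved to its crate and navigation targets that crate's root module. In fixture terms (an illustrative example in the style of the tests elsewhere in this patch, not taken from it):

    //- /main.rs crate:main deps:foo
    extern crate foo$0;
    // goto-definition on `foo` now lands in foo's root module (lib.rs below)
    //- /foo/lib.rs crate:foo
    pub fn answer() -> i32 { 42 }
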
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
index a1a119629..37166bdbd 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs
@@ -34,54 +34,50 @@ pub(crate) fn goto_implementation(
_ => 0,
})?;
let range = original_token.text_range();
- let navs = sema
- .descend_into_macros(original_token)
- .into_iter()
- .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
- .filter_map(|node| match &node {
- ast::NameLike::Name(name) => {
- NameClass::classify(&sema, name).map(|class| match class {
- NameClass::Definition(it) | NameClass::ConstReference(it) => it,
- NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
- Definition::Local(local_def)
+ let navs =
+ sema.descend_into_macros(original_token)
+ .into_iter()
+ .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
+ .filter_map(|node| match &node {
+ ast::NameLike::Name(name) => {
+ NameClass::classify(&sema, name).and_then(|class| match class {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it),
+ NameClass::PatFieldShorthand { .. } => None,
+ })
+ }
+ ast::NameLike::NameRef(name_ref) => NameRefClass::classify(&sema, name_ref)
+ .and_then(|class| match class {
+ NameRefClass::Definition(def) => Some(def),
+ NameRefClass::FieldShorthand { .. }
+ | NameRefClass::ExternCrateShorthand { .. } => None,
+ }),
+ ast::NameLike::Lifetime(_) => None,
+ })
+ .unique()
+ .filter_map(|def| {
+ let navs = match def {
+ Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
+ Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
+ Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
+ Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
+ Definition::Function(f) => {
+ let assoc = f.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
}
- })
- }
- ast::NameLike::NameRef(name_ref) => {
- NameRefClass::classify(&sema, name_ref).map(|class| match class {
- NameRefClass::Definition(def) => def,
- NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
- Definition::Local(local_ref)
+ Definition::Const(c) => {
+ let assoc = c.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
}
- })
- }
- ast::NameLike::Lifetime(_) => None,
- })
- .unique()
- .filter_map(|def| {
- let navs = match def {
- Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
- Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
- Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
- Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
- Definition::Function(f) => {
- let assoc = f.as_assoc_item(sema.db)?;
- let name = assoc.name(sema.db)?;
- let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
- impls_for_trait_item(&sema, trait_, name)
- }
- Definition::Const(c) => {
- let assoc = c.as_assoc_item(sema.db)?;
- let name = assoc.name(sema.db)?;
- let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
- impls_for_trait_item(&sema, trait_, name)
- }
- _ => return None,
- };
- Some(navs)
- })
- .flatten()
- .collect();
+ _ => return None,
+ };
+ Some(navs)
+ })
+ .flatten()
+ .collect();
Some(RangeInfo { range, info: navs })
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 7e545491f..43e89a334 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -100,10 +100,7 @@ fn highlight_closure_captures(
.flat_map(|local| {
let usages = Definition::Local(local)
.usages(sema)
- .set_scope(Some(SearchScope::file_range(FileRange {
- file_id,
- range: search_range,
- })))
+ .in_scope(&SearchScope::file_range(FileRange { file_id, range: search_range }))
.include_self_refs()
.all()
.references
@@ -139,7 +136,7 @@ fn highlight_references(
.iter()
.filter_map(|&d| {
d.usages(sema)
- .set_scope(Some(SearchScope::single_file(file_id)))
+ .in_scope(&SearchScope::single_file(file_id))
.include_self_refs()
.all()
.references
@@ -183,7 +180,7 @@ fn highlight_references(
.filter_map(|item| {
Definition::from(item)
.usages(sema)
- .set_scope(Some(SearchScope::file_range(FileRange {
+ .set_scope(Some(&SearchScope::file_range(FileRange {
file_id,
range: trait_item_use_scope.text_range(),
})))
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index 5ef6ac948..40659e6c2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -9,7 +9,7 @@ use either::Either;
use hir::{db::DefDatabase, HasSource, LangItem, Semantics};
use ide_db::{
base_db::FileRange,
- defs::{Definition, IdentClass, OperatorClass},
+ defs::{Definition, IdentClass, NameRefClass, OperatorClass},
famous_defs::FamousDefs,
helpers::pick_best_token,
FxIndexSet, RootDatabase,
@@ -186,7 +186,20 @@ fn hover_simple(
// rendering poll is very confusing
return None;
}
- Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
+ if let IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand {
+ decl,
+ ..
+ }) = class
+ {
+ return Some(vec![(Definition::ExternCrateDecl(decl), node)]);
+ }
+ Some(
+ class
+ .definitions()
+ .into_iter()
+ .zip(iter::once(node).cycle())
+ .collect::<Vec<_>>(),
+ )
})
.flatten()
.unique_by(|&(def, _)| def)
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 136214641..a33a6ee18 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -257,7 +257,7 @@ pub(super) fn keyword(
let KeywordHint { description, keyword_mod, actions } = keyword_hints(sema, token, parent);
let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
- let docs = doc_owner.attrs(sema.db).docs()?;
+ let docs = doc_owner.docs(sema.db)?;
let markup = process_markup(
sema.db,
Definition::Module(doc_owner),
@@ -422,10 +422,10 @@ pub(super) fn definition(
|&it| {
if !it.parent_enum(db).is_data_carrying(db) {
match it.eval(db) {
- Ok(x) => {
- Some(if x >= 10 { format!("{x} ({x:#X})") } else { format!("{x}") })
+ Ok(it) => {
+ Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") })
}
- Err(_) => it.value(db).map(|x| format!("{x:?}")),
+ Err(_) => it.value(db).map(|it| format!("{it:?}")),
}
} else {
None
@@ -437,7 +437,7 @@ pub(super) fn definition(
Definition::Const(it) => label_value_and_docs(db, it, |it| {
let body = it.render_eval(db);
match body {
- Ok(x) => Some(x),
+ Ok(it) => Some(it),
Err(_) => {
let source = it.source(db)?;
let mut body = source.value.body()?.syntax().clone();
@@ -472,6 +472,7 @@ pub(super) fn definition(
}
Definition::GenericParam(it) => label_and_docs(db, it),
Definition::Label(it) => return Some(Markup::fenced_block(&it.name(db).display(db))),
+ Definition::ExternCrateDecl(it) => label_and_docs(db, it),
// FIXME: We should be able to show more info about these
Definition::BuiltinAttr(it) => return render_builtin_attr(db, it),
Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))),
@@ -620,7 +621,7 @@ where
D: HasAttrs + HirDisplay,
{
let label = def.display(db).to_string();
- let docs = def.attrs(db).docs();
+ let docs = def.docs(db);
(label, docs)
}
@@ -645,7 +646,7 @@ where
) {
format_to!(label, "{layout}");
}
- let docs = def.attrs(db).docs();
+ let docs = def.docs(db);
(label, docs)
}
@@ -677,7 +678,7 @@ where
) {
format_to!(label, "{layout}");
}
- let docs = def.attrs(db).docs();
+ let docs = def.docs(db);
(label, docs)
}
@@ -696,7 +697,7 @@ where
} else {
def.display(db).to_string()
};
- let docs = def.attrs(db).docs();
+ let docs = def.docs(db);
(label, docs)
}
@@ -727,14 +728,14 @@ fn builtin(famous_defs: &FamousDefs<'_, '_>, builtin: hir::BuiltinType) -> Optio
// std exposes prim_{} modules with docstrings on the root to document the builtins
let primitive_mod = format!("prim_{}", builtin.name().display(famous_defs.0.db));
let doc_owner = find_std_module(famous_defs, &primitive_mod)?;
- let docs = doc_owner.attrs(famous_defs.0.db).docs()?;
+ let docs = doc_owner.docs(famous_defs.0.db)?;
markup(Some(docs.into()), builtin.name().display(famous_defs.0.db).to_string(), None)
}
fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> {
let db = famous_defs.0.db;
let std_crate = famous_defs.std()?;
- let std_root_module = std_crate.root_module(db);
+ let std_root_module = std_crate.root_module();
std_root_module.children(db).find(|module| {
module.name(db).map_or(false, |module| module.display(db).to_string() == name)
})
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index f75ebfa12..ddc71dffa 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -674,7 +674,7 @@ struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 }
```
```rust
- field_a: u8 // size = 1, align = 1, offset = 4
+ field_a: u8 // size = 1, align = 1, offset = 6
```
"#]],
);
@@ -779,6 +779,39 @@ const foo$0: u32 = {
```
"#]],
);
+
+ check(
+ r#"const FOO$0: i32 = -2147483648;"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: i32 = -2147483648 (0x80000000)
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+ const FOO: i32 = -2147483648;
+ const BAR$0: bool = FOO > 0;
+ "#,
+ expect![[r#"
+ *BAR*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const BAR: bool = false
+ ```
+ "#]],
+ );
}
#[test]
@@ -1583,6 +1616,9 @@ fn test_hover_extern_crate() {
check(
r#"
//- /main.rs crate:main deps:std
+//! Crate docs
+
+/// Decl docs!
extern crate st$0d;
//- /std/lib.rs crate:std
//! Standard library for this test
@@ -1591,23 +1627,32 @@ extern crate st$0d;
//! abc123
"#,
expect![[r#"
- *std*
+ *std*
- ```rust
- extern crate std
- ```
+ ```rust
+ main
+ ```
- ---
+ ```rust
+ extern crate std
+ ```
+
+ ---
- Standard library for this test
+ Decl docs!
- Printed?
- abc123
- "#]],
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
);
check(
r#"
//- /main.rs crate:main deps:std
+//! Crate docs
+
+/// Decl docs!
extern crate std as ab$0c;
//- /std/lib.rs crate:std
//! Standard library for this test
@@ -1616,19 +1661,25 @@ extern crate std as ab$0c;
//! abc123
"#,
expect![[r#"
- *abc*
+ *abc*
- ```rust
- extern crate std
- ```
+ ```rust
+ main
+ ```
- ---
+ ```rust
+ extern crate std as abc
+ ```
- Standard library for this test
+ ---
- Printed?
- abc123
- "#]],
+ Decl docs!
+
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
index 10bee2a6a..6d6bd315e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
@@ -259,7 +259,7 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool,
}
})() else {
never!("broken syntax tree?\n{:?}\n{:?}", expr, dummy_expr);
- return (true, true)
+ return (true, true);
};
// At this point
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
index 84eac16b9..b621a8dda 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
@@ -474,7 +474,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9287..9295,
+ range: 9289..9297,
},
),
tooltip: "",
@@ -487,7 +487,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9319..9323,
+ range: 9321..9325,
},
),
tooltip: "",
@@ -511,7 +511,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9287..9295,
+ range: 9289..9297,
},
),
tooltip: "",
@@ -524,7 +524,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9319..9323,
+ range: 9321..9325,
},
),
tooltip: "",
@@ -548,7 +548,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9287..9295,
+ range: 9289..9297,
},
),
tooltip: "",
@@ -561,7 +561,7 @@ fn main() {
file_id: FileId(
1,
),
- range: 9319..9323,
+ range: 9321..9325,
},
),
tooltip: "",
diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
index cbcbb4b09..d06ffd535 100644
--- a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs
@@ -34,13 +34,15 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<Strin
_ => return None,
};
let span_formatter = |file_id, text_range: TextRange| {
- let line_col = db.line_index(file_id).line_col(text_range.start());
let path = &db
.source_root(db.file_source_root(file_id))
.path_for_file(&file_id)
.map(|x| x.to_string());
let path = path.as_deref().unwrap_or("<unknown file>");
- format!("file://{path}#{}:{}", line_col.line + 1, line_col.col)
+ match db.line_index(file_id).try_line_col(text_range.start()) {
+ Some(line_col) => format!("file://{path}#{}:{}", line_col.line + 1, line_col.col),
+ None => format!("file://{path} range {:?}", text_range),
+ }
};
Some(def.eval(db, span_formatter))
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index f195f78b3..bf77d55d5 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -60,6 +60,7 @@ mod interpret_function;
mod view_item_tree;
mod shuffle_crate_graph;
mod fetch_crates;
+mod view_memory_layout;
use std::ffi::OsStr;
@@ -74,6 +75,7 @@ use ide_db::{
};
use syntax::SourceFile;
use triomphe::Arc;
+use view_memory_layout::{view_memory_layout, RecursiveMemoryLayout};
use crate::navigation_target::{ToNav, TryToNav};
@@ -125,7 +127,7 @@ pub use ide_db::{
label::Label,
line_index::{LineCol, LineIndex},
search::{ReferenceCategory, SearchScope},
- source_change::{FileSystemEdit, SourceChange},
+ source_change::{FileSystemEdit, SnippetEdit, SourceChange},
symbol_index::Query,
RootDatabase, SymbolKind,
};
@@ -642,7 +644,7 @@ impl Analysis {
};
self.with_db(|db| {
- let diagnostic_assists = if include_fixes {
+ let diagnostic_assists = if diagnostics_config.enabled && include_fixes {
ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
.into_iter()
.flat_map(|it| it.fixes.unwrap_or_default())
@@ -724,6 +726,13 @@ impl Analysis {
self.with_db(|db| move_item::move_item(db, range, direction))
}
+ pub fn get_recursive_memory_layout(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RecursiveMemoryLayout>> {
+ self.with_db(|db| view_memory_layout(db, position))
+ }
+
/// Performs an operation on the database that may be canceled.
///
/// rust-analyzer needs to be able to answer semantic questions about the
diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
index 0d57e63d2..17f3771b1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs
@@ -177,6 +177,17 @@ pub(crate) fn def_to_moniker(
});
}
+ // Qualify locals/parameters by their parent definition name.
+ if let Definition::Local(it) = def {
+ let parent_name = it.parent(db).name(db);
+ if let Some(name) = parent_name {
+ description.push(MonikerDescriptor {
+ name: name.display(db).to_string(),
+ desc: MonikerDescriptorKind::Method,
+ });
+ }
+ }
+
let name_desc = match def {
// These are handled by top-level guard (for performance).
Definition::GenericParam(_)
@@ -247,6 +258,10 @@ pub(crate) fn def_to_moniker(
name: s.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Meta,
},
+ Definition::ExternCrateDecl(m) => MonikerDescriptor {
+ name: m.name(db).display(db).to_string(),
+ desc: MonikerDescriptorKind::Namespace,
+ },
};
description.push(name_desc);
@@ -320,7 +335,7 @@ use foo::module::func;
fn main() {
func$0();
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub fn func() {}
}
@@ -336,7 +351,7 @@ use foo::module::func;
fn main() {
func();
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub fn func$0() {}
}
@@ -351,7 +366,7 @@ pub mod module {
fn moniker_for_trait() {
check_moniker(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
pub fn func$0() {}
@@ -368,7 +383,7 @@ pub mod module {
fn moniker_for_trait_constant() {
check_moniker(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
const MY_CONST$0: u8;
@@ -385,7 +400,7 @@ pub mod module {
fn moniker_for_trait_type() {
check_moniker(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
type MyType$0;
@@ -402,7 +417,7 @@ pub mod module {
fn moniker_for_trait_impl_function() {
check_moniker(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
pub fn func() {}
@@ -430,7 +445,7 @@ use foo::St;
fn main() {
let x = St { a$0: 2 };
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub struct St {
pub a: i32,
}
@@ -450,7 +465,7 @@ use foo::module::func;
fn main() {
func();
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub fn func() {
let x$0 = 2;
diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
index 385c1b0c0..d1479dd1e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs
@@ -102,7 +102,7 @@ impl NavigationTarget {
full_range,
SymbolKind::Module,
);
- res.docs = module.attrs(db).docs();
+ res.docs = module.docs(db);
res.description = Some(module.display(db).to_string());
return res;
}
@@ -217,6 +217,7 @@ impl TryToNav for Definition {
Definition::Trait(it) => it.try_to_nav(db),
Definition::TraitAlias(it) => it.try_to_nav(db),
Definition::TypeAlias(it) => it.try_to_nav(db),
+ Definition::ExternCrateDecl(it) => Some(it.try_to_nav(db)?),
Definition::BuiltinType(_) => None,
Definition::ToolModule(_) => None,
Definition::BuiltinAttr(_) => None,
@@ -357,13 +358,11 @@ impl ToNav for hir::Module {
impl TryToNav for hir::Impl {
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
let InFile { file_id, value } = self.source(db)?;
- let derive_attr = self.is_builtin_derive(db);
+ let derive_attr = self.as_builtin_derive(db);
- let focus = if derive_attr.is_some() { None } else { value.self_ty() };
-
- let syntax = match &derive_attr {
- Some(attr) => attr.value.syntax(),
- None => value.syntax(),
+ let (focus, syntax) = match &derive_attr {
+ Some(attr) => (None, attr.value.syntax()),
+ None => (value.self_ty(), value.syntax()),
};
let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
@@ -377,6 +376,30 @@ impl TryToNav for hir::Impl {
}
}
+impl TryToNav for hir::ExternCrateDecl {
+ fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
+ let src = self.source(db)?;
+ let InFile { file_id, value } = src;
+ let focus = value
+ .rename()
+ .map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right));
+ let (file_id, full_range, focus_range) =
+ orig_range_with_focus(db, file_id, value.syntax(), focus);
+ let mut res = NavigationTarget::from_syntax(
+ file_id,
+ self.alias_or_name(db).unwrap_or_else(|| self.name(db)).to_smol_str(),
+ focus_range,
+ full_range,
+ SymbolKind::Module,
+ );
+
+ res.docs = self.docs(db);
+ res.description = Some(self.display(db).to_string());
+ res.container_name = container_name(db, *self);
+ Some(res)
+ }
+}
+
impl TryToNav for hir::Field {
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
let src = self.source(db)?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index fdc5261ac..813f9ed94 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -74,7 +74,7 @@ pub(crate) fn find_all_refs(
}
});
let mut usages =
- def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
+ def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all();
if literal_search {
retain_adt_literal_usages(&mut usages, def, sema);
@@ -137,6 +137,9 @@ pub(crate) fn find_defs<'a>(
NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
Definition::Local(local_ref)
}
+ NameRefClass::ExternCrateShorthand { decl, .. } => {
+ Definition::ExternCrateDecl(decl)
+ }
}
}
ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index e10c46381..dae8e71e8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -145,7 +145,14 @@ fn find_definitions(
if name
.syntax()
.parent()
- .map_or(false, |it| ast::Rename::can_cast(it.kind())) =>
+ .map_or(false, |it| ast::Rename::can_cast(it.kind()))
+            // FIXME: uncomment this once we resolve usages to extern crate declarations
+ // && name
+ // .syntax()
+ // .ancestors()
+ // .nth(2)
+ // .map_or(true, |it| !ast::ExternCrate::can_cast(it.kind()))
+ =>
{
bail!("Renaming aliases is currently unsupported")
}
@@ -165,7 +172,12 @@ fn find_definitions(
NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
Definition::Local(local_ref)
}
+ NameRefClass::ExternCrateShorthand { decl, .. } => {
+ Definition::ExternCrateDecl(decl)
+ }
})
+        // FIXME: uncomment this once we resolve usages to extern crate declarations
+ .filter(|def| !matches!(def, Definition::ExternCrateDecl(..)))
.ok_or_else(|| format_err!("No references found at position"))
.and_then(|def| {
// if the name differs from the definitions name it has to be an alias
@@ -367,7 +379,7 @@ mod tests {
let mut file_id: Option<FileId> = None;
for edit in source_change.source_file_edits {
file_id = Some(edit.0);
- for indel in edit.1.into_iter() {
+ for indel in edit.1 .0.into_iter() {
text_edit_builder.replace(indel.delete, indel.insert);
}
}
@@ -895,14 +907,17 @@ mod foo$0;
source_file_edits: {
FileId(
1,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 4..7,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -944,24 +959,30 @@ use crate::foo$0::FooContent;
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "quux",
- delete: 8..11,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "quux",
+ delete: 8..11,
+ },
+ ],
+ },
+ None,
+ ),
FileId(
2,
- ): TextEdit {
- indels: [
- Indel {
- insert: "quux",
- delete: 11..14,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "quux",
+ delete: 11..14,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -997,14 +1018,17 @@ mod fo$0o;
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 4..7,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveDir {
@@ -1047,14 +1071,17 @@ mod outer { mod fo$0o; }
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "bar",
- delete: 16..19,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "bar",
+ delete: 16..19,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -1120,24 +1147,30 @@ pub mod foo$0;
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 27..30,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 27..30,
+ },
+ ],
+ },
+ None,
+ ),
FileId(
1,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 8..11,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 8..11,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -1187,14 +1220,17 @@ mod quux;
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo2",
- delete: 4..7,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo2",
+ delete: 4..7,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -1325,18 +1361,21 @@ pub fn baz() {}
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "r#fn",
- delete: 4..7,
- },
- Indel {
- insert: "r#fn",
- delete: 22..25,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "r#fn",
+ delete: 4..7,
+ },
+ Indel {
+ insert: "r#fn",
+ delete: 22..25,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -1395,18 +1434,21 @@ pub fn baz() {}
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "foo",
- delete: 4..8,
- },
- Indel {
- insert: "foo",
- delete: 23..27,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "foo",
+ delete: 4..8,
+ },
+ Indel {
+ insert: "foo",
+ delete: 23..27,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [
MoveFile {
@@ -2487,4 +2529,109 @@ fn main() {
",
)
}
+
+ #[test]
+ fn extern_crate() {
+ check_prepare(
+ r"
+//- /lib.rs crate:main deps:foo
+extern crate foo$0;
+use foo as qux;
+//- /foo.rs crate:foo
+",
+ expect![[r#"No references found at position"#]],
+ );
+        // FIXME: replace the above check_prepare with this once we resolve usages to extern crate declarations
+ // check(
+ // "bar",
+ // r"
+ // //- /lib.rs crate:main deps:foo
+ // extern crate foo$0;
+ // use foo as qux;
+ // //- /foo.rs crate:foo
+ // ",
+ // r"
+ // extern crate foo as bar;
+ // use bar as qux;
+ // ",
+ // );
+ }
+
+ #[test]
+ fn extern_crate_rename() {
+ check_prepare(
+ r"
+//- /lib.rs crate:main deps:foo
+extern crate foo as qux$0;
+use qux as frob;
+//- /foo.rs crate:foo
+",
+ expect!["Renaming aliases is currently unsupported"],
+ );
+        // FIXME: replace the above check_prepare with this once we resolve usages to extern crate
+ // declarations
+ // check(
+ // "bar",
+ // r"
+ // //- /lib.rs crate:main deps:foo
+ // extern crate foo as qux$0;
+ // use qux as frob;
+ // //- /foo.rs crate:foo
+ // ",
+ // r"
+ // extern crate foo as bar;
+ // use bar as frob;
+ // ",
+ // );
+ }
+
+ #[test]
+ fn extern_crate_self() {
+ check_prepare(
+ r"
+extern crate self$0;
+use self as qux;
+",
+ expect!["No references found at position"],
+ );
+        // FIXME: replace the above check_prepare with this once we resolve usages to extern crate declarations
+ // check(
+ // "bar",
+ // r"
+ // extern crate self$0;
+ // use self as qux;
+ // ",
+ // r"
+ // extern crate self as bar;
+ // use self as qux;
+ // ",
+ // );
+ }
+
+ #[test]
+ fn extern_crate_self_rename() {
+ check_prepare(
+ r"
+//- /lib.rs crate:main deps:foo
+extern crate self as qux$0;
+use qux as frob;
+//- /foo.rs crate:foo
+",
+ expect!["Renaming aliases is currently unsupported"],
+ );
+        // FIXME: replace the above check_prepare with this once we resolve usages to extern crate declarations
+ // check(
+ // "bar",
+ // r"
+ // //- /lib.rs crate:main deps:foo
+ // extern crate self as qux$0;
+ // use qux as frob;
+ // //- /foo.rs crate:foo
+ // ",
+ // r"
+ // extern crate self as bar;
+ // use bar as frob;
+ // ",
+ // );
+ }
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index 27ad63d82..5f87a7855 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -2,7 +2,7 @@ use std::fmt;
use ast::HasName;
use cfg::CfgExpr;
-use hir::{AsAssocItem, HasAttrs, HasSource, Semantics};
+use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics};
use ide_assists::utils::test_related_attribute;
use ide_db::{
base_db::{FilePosition, FileRange},
@@ -14,7 +14,7 @@ use ide_db::{
use itertools::Itertools;
use stdx::{always, format_to};
use syntax::{
- ast::{self, AstNode, HasAttrs as _},
+ ast::{self, AstNode},
SmolStr, SyntaxNode,
};
@@ -232,7 +232,7 @@ fn find_related_tests(
for def in defs {
let defs = def
.usages(sema)
- .set_scope(search_scope.clone())
+ .set_scope(search_scope.as_ref())
.all()
.references
.into_values()
@@ -307,10 +307,9 @@ pub(crate) fn runnable_fn(
sema: &Semantics<'_, RootDatabase>,
def: hir::Function,
) -> Option<Runnable> {
- let func = def.source(sema.db)?;
let name = def.name(sema.db).to_smol_str();
- let root = def.module(sema.db).krate().root_module(sema.db);
+ let root = def.module(sema.db).krate().root_module();
let kind = if name == "main" && def.module(sema.db) == root {
RunnableKind::Bin
@@ -323,10 +322,10 @@ pub(crate) fn runnable_fn(
canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name))
};
- if test_related_attribute(&func.value).is_some() {
- let attr = TestAttr::from_fn(&func.value);
+ if def.is_test(sema.db) {
+ let attr = TestAttr::from_fn(sema.db, def);
RunnableKind::Test { test_id: test_id(), attr }
- } else if func.value.has_atom_attr("bench") {
+ } else if def.is_bench(sema.db) {
RunnableKind::Bench { test_id: test_id() }
} else {
return None;
@@ -335,7 +334,7 @@ pub(crate) fn runnable_fn(
let nav = NavigationTarget::from_named(
sema.db,
- func.as_ref().map(|it| it as &dyn ast::HasName),
+ def.source(sema.db)?.as_ref().map(|it| it as &dyn ast::HasName),
SymbolKind::Function,
);
let cfg = def.attrs(sema.db).cfg();
@@ -487,12 +486,8 @@ pub struct TestAttr {
}
impl TestAttr {
- fn from_fn(fn_def: &ast::Fn) -> TestAttr {
- let ignore = fn_def
- .attrs()
- .filter_map(|attr| attr.simple_name())
- .any(|attribute_text| attribute_text == "ignore");
- TestAttr { ignore }
+ fn from_fn(db: &dyn HirDatabase, fn_def: hir::Function) -> TestAttr {
+ TestAttr { ignore: fn_def.is_ignore(db) }
}
}
@@ -594,6 +589,9 @@ fn main() {}
#[test]
fn test_foo() {}
+#[::core::prelude::v1::test]
+fn test_full_path() {}
+
#[test]
#[ignore]
fn test_foo() {}
@@ -605,7 +603,7 @@ mod not_a_root {
fn main() {}
}
"#,
- &[TestMod, Bin, Test, Test, Bench],
+ &[TestMod, Bin, Test, Test, Test, Bench],
expect![[r#"
[
Runnable {
@@ -614,7 +612,7 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 0..137,
+ full_range: 0..190,
name: "",
kind: Module,
},
@@ -664,8 +662,29 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 41..75,
- focus_range: 62..70,
+ full_range: 41..92,
+ focus_range: 73..87,
+ name: "test_full_path",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_full_path",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 94..128,
+ focus_range: 115..123,
name: "test_foo",
kind: Function,
},
@@ -685,8 +704,8 @@ mod not_a_root {
file_id: FileId(
0,
),
- full_range: 77..99,
- focus_range: 89..94,
+ full_range: 130..152,
+ focus_range: 142..147,
name: "bench",
kind: Function,
},
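
The runnables changes above stop inspecting raw syntax attributes and ask hir instead (`def.is_test`, `def.is_bench`, `def.is_ignore`), which is why the fixture gains a fully qualified test attribute and an extra expected runnable. Illustrative user code now picked up as runnables (a minimal example, assuming an ordinary test build):

    // Recognized as a test runnable even though the attribute is spelled
    // through its full path rather than the bare `#[test]` shorthand.
    #[::core::prelude::v1::test]
    fn detected_via_full_path() {}

    // The `ignore` flag is now read through hir as well.
    #[test]
    #[ignore]
    fn detected_and_marked_ignored() {}
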
diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
index deaf3c9c4..d8d81869a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/ssr.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
@@ -126,14 +126,17 @@ mod tests {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "3",
- delete: 33..34,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 33..34,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: false,
@@ -163,24 +166,30 @@ mod tests {
source_file_edits: {
FileId(
0,
- ): TextEdit {
- indels: [
- Indel {
- insert: "3",
- delete: 33..34,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 33..34,
+ },
+ ],
+ },
+ None,
+ ),
FileId(
1,
- ): TextEdit {
- indels: [
- Indel {
- insert: "3",
- delete: 11..12,
- },
- ],
- },
+ ): (
+ TextEdit {
+ indels: [
+ Indel {
+ insert: "3",
+ delete: 11..12,
+ },
+ ],
+ },
+ None,
+ ),
},
file_system_edits: [],
is_snippet: false,
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index 3e3d9f8f8..d8696198d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -3,13 +3,14 @@
use std::collections::HashMap;
-use hir::{db::HirDatabase, Crate, Module, Semantics};
+use hir::{db::HirDatabase, Crate, Module};
+use ide_db::helpers::get_definition;
use ide_db::{
base_db::{FileId, FileRange, SourceDatabaseExt},
- defs::{Definition, IdentClass},
+ defs::Definition,
FxHashSet, RootDatabase,
};
-use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T};
+use syntax::{AstNode, SyntaxKind::*, TextRange, T};
use crate::{
hover::hover_for_definition,
@@ -73,7 +74,7 @@ impl TokenStore {
}
pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
- self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+ self.0.into_iter().enumerate().map(|(id, data)| (TokenId(id), data))
}
}
@@ -87,7 +88,7 @@ pub struct StaticIndexedFile {
fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
let mut worklist: Vec<_> =
- Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ Crate::all(db).into_iter().map(|krate| krate.root_module()).collect();
let mut modules = Vec::new();
while let Some(module) = worklist.pop() {
@@ -132,9 +133,9 @@ impl StaticIndex<'_> {
// hovers
let sema = hir::Semantics::new(self.db);
let tokens_or_nodes = sema.parse(file_id).syntax().clone();
- let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+ let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it {
syntax::NodeOrToken::Node(_) => None,
- syntax::NodeOrToken::Token(x) => Some(x),
+ syntax::NodeOrToken::Token(it) => Some(it),
});
let hover_config = HoverConfig {
links_in_hover: true,
@@ -154,28 +155,29 @@ impl StaticIndex<'_> {
let range = token.text_range();
let node = token.parent().unwrap();
let def = match get_definition(&sema, token.clone()) {
- Some(x) => x,
+ Some(it) => it,
None => continue,
};
- let id = if let Some(x) = self.def_map.get(&def) {
- *x
+ let id = if let Some(it) = self.def_map.get(&def) {
+ *it
} else {
- let x = self.tokens.insert(TokenStaticData {
+ let it = self.tokens.insert(TokenStaticData {
hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
- definition: def
- .try_to_nav(self.db)
- .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+ definition: def.try_to_nav(self.db).map(|it| FileRange {
+ file_id: it.file_id,
+ range: it.focus_or_full_range(),
+ }),
references: vec![],
moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
});
- self.def_map.insert(def, x);
- x
+ self.def_map.insert(def, it);
+ it
};
let token = self.tokens.get_mut(id).unwrap();
token.references.push(ReferenceData {
range: FileRange { range, file_id },
is_definition: match def.try_to_nav(self.db) {
- Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
+ Some(it) => it.file_id == file_id && it.focus_or_full_range() == range,
None => false,
},
});
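
The `def_map` lookup followed by `self.tokens.insert(...)` above is a get-or-insert interning step: each `Definition` is assigned exactly one `TokenId`, and later references reuse it while only appending to the per-token reference list. A self-contained sketch of that pattern with toy types (not the real `TokenStore`/`Definition`):

    use std::collections::HashMap;

    // Toy interner mirroring the tokens + def_map pairing (illustrative only).
    #[derive(Default)]
    struct Interner {
        data: Vec<String>,           // plays the role of TokenStore
        ids: HashMap<String, usize>, // plays the role of def_map: Definition -> TokenId
    }

    impl Interner {
        fn get_or_insert(&mut self, key: &str) -> usize {
            if let Some(&id) = self.ids.get(key) {
                return id; // already interned: reuse the existing id
            }
            let id = self.data.len();
            self.data.push(key.to_owned());
            self.ids.insert(key.to_owned(), id);
            id
        }
    }

    fn main() {
        let mut interner = Interner::default();
        let a = interner.get_or_insert("Definition::Function(foo)");
        let b = interner.get_or_insert("Definition::Function(foo)");
        assert_eq!(a, b); // the second reference maps to the same id
        assert_eq!(interner.data.len(), 1);
    }
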
@@ -187,7 +189,7 @@ impl StaticIndex<'_> {
pub fn compute(analysis: &Analysis) -> StaticIndex<'_> {
let db = &*analysis.db;
let work = all_modules(db).into_iter().filter(|module| {
- let file_id = module.definition_source(db).file_id.original_file(db);
+ let file_id = module.definition_source_file_id(db).original_file(db);
let source_root = db.file_source_root(file_id);
let source_root = db.source_root(source_root);
!source_root.is_library
@@ -201,7 +203,7 @@ impl StaticIndex<'_> {
};
let mut visited_files = FxHashSet::default();
for module in work {
- let file_id = module.definition_source(db).file_id.original_file(db);
+ let file_id = module.definition_source_file_id(db).original_file(db);
if visited_files.contains(&file_id) {
continue;
}
@@ -213,16 +215,6 @@ impl StaticIndex<'_> {
}
}
-fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
- for token in sema.descend_into_macros(token) {
- let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
- if let Some(&[x]) = def.as_deref() {
- return Some(x);
- }
- }
- None
-}
-
#[cfg(test)]
mod tests {
use crate::{fixture, StaticIndex};
@@ -233,14 +225,14 @@ mod tests {
fn check_all_ranges(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&analysis);
- let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
for f in s.files {
for (range, _) in f.tokens {
- let x = FileRange { file_id: f.file_id, range };
- if !range_set.contains(&x) {
- panic!("additional range {x:?}");
+ let it = FileRange { file_id: f.file_id, range };
+ if !range_set.contains(&it) {
+ panic!("additional range {it:?}");
}
- range_set.remove(&x);
+ range_set.remove(&it);
}
}
if !range_set.is_empty() {
@@ -251,17 +243,17 @@ mod tests {
fn check_definitions(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&analysis);
- let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
for (_, t) in s.tokens.iter() {
- if let Some(x) = t.definition {
- if x.range.start() == TextSize::from(0) {
+ if let Some(t) = t.definition {
+ if t.range.start() == TextSize::from(0) {
// ignore definitions that are whole of file
continue;
}
- if !range_set.contains(&x) {
- panic!("additional definition {x:?}");
+ if !range_set.contains(&t) {
+ panic!("additional definition {t:?}");
}
- range_set.remove(&x);
+ range_set.remove(&t);
}
}
if !range_set.is_empty() {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index dc06591ff..ae9723640 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -24,7 +24,7 @@ use syntax::{
use crate::{
syntax_highlighting::{
- escape::{highlight_escape_char, highlight_escape_string},
+ escape::{highlight_escape_byte, highlight_escape_char, highlight_escape_string},
format::highlight_format_string,
highlights::Highlights,
macro_::MacroHighlighter,
@@ -265,10 +265,14 @@ fn traverse(
// set macro and attribute highlighting states
match event.clone() {
- Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+ Enter(NodeOrToken::Node(node))
+ if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
+ {
tt_level += 1;
}
- Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+ Leave(NodeOrToken::Node(node))
+ if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
+ {
tt_level -= 1;
}
Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
@@ -387,7 +391,7 @@ fn traverse(
};
let descended_element = if in_macro {
// Attempt to descend tokens into macro-calls.
- match element {
+ let res = match element {
NodeOrToken::Token(token) if token.kind() != COMMENT => {
let token = match attr_or_derive_item {
Some(AttrOrDerive::Attr(_)) => {
@@ -412,7 +416,8 @@ fn traverse(
}
}
e => e,
- }
+ };
+ res
} else {
element
};
@@ -466,6 +471,14 @@ fn traverse(
};
highlight_escape_char(hl, &char, range.start())
+ } else if ast::Byte::can_cast(token.kind())
+ && ast::Byte::can_cast(descended_token.kind())
+ {
+ let Some(byte) = ast::Byte::cast(token) else {
+ continue;
+ };
+
+ highlight_escape_byte(hl, &byte, range.start())
}
}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
index 211e35880..5913ca5e4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/escape.rs
@@ -1,7 +1,7 @@
//! Syntax highlighting for escape sequences
use crate::syntax_highlighting::highlights::Highlights;
use crate::{HlRange, HlTag};
-use syntax::ast::{Char, IsString};
+use syntax::ast::{Byte, Char, IsString};
use syntax::{AstToken, TextRange, TextSize};
pub(super) fn highlight_escape_string<T: IsString>(
@@ -10,14 +10,14 @@ pub(super) fn highlight_escape_string<T: IsString>(
start: TextSize,
) {
string.escaped_char_ranges(&mut |piece_range, char| {
- if char.is_err() {
- return;
- }
-
if string.text()[piece_range.start().into()..].starts_with('\\') {
+ let highlight = match char {
+ Ok(_) => HlTag::EscapeSequence,
+ Err(_) => HlTag::InvalidEscapeSequence,
+ };
stack.add(HlRange {
range: piece_range + start,
- highlight: HlTag::EscapeSequence.into(),
+ highlight: highlight.into(),
binding_hash: None,
});
}
@@ -26,6 +26,9 @@ pub(super) fn highlight_escape_string<T: IsString>(
pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start: TextSize) {
if char.value().is_none() {
+        // We do not emit invalid escape highlighting here. The lexer would likely be in a bad
+        // state and this token contains junk, since `'` is not a reliable delimiter (consider
+        // lifetimes). Nonetheless, parser errors should already have been emitted.
return;
}
@@ -43,3 +46,24 @@ pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start:
TextRange::new(start + TextSize::from(1), start + TextSize::from(text.len() as u32 + 1));
stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
}
+
+pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte, start: TextSize) {
+ if byte.value().is_none() {
+ // See `highlight_escape_char` for why no error highlighting here.
+ return;
+ }
+
+ let text = byte.text();
+ if !text.starts_with("b'") || !text.ends_with('\'') {
+ return;
+ }
+
+ let text = &text[2..text.len() - 1];
+ if !text.starts_with('\\') {
+ return;
+ }
+
+ let range =
+ TextRange::new(start + TextSize::from(2), start + TextSize::from(text.len() as u32 + 2));
+ stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
+}
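
`highlight_escape_byte` is the byte-literal twin of `highlight_escape_char`: the only structural difference is the `b'` prefix, so the escape body starts two characters into the token instead of one. A standalone sketch of that offset arithmetic on plain strings, with `usize` offsets standing in for `TextSize`/`TextRange` (illustrative only):

    // Half-open (start, end) of the escape body inside a char or byte literal,
    // relative to the start of the literal; None if there is nothing to highlight.
    fn escape_range(literal: &str) -> Option<(usize, usize)> {
        let prefix_len = if literal.starts_with("b'") {
            2 // byte literal: skip `b'`
        } else if literal.starts_with('\'') {
            1 // char literal: skip `'`
        } else {
            return None;
        };
        if !literal.ends_with('\'') || literal.len() <= prefix_len + 1 {
            return None;
        }
        let body = &literal[prefix_len..literal.len() - 1];
        if !body.starts_with('\\') {
            return None; // no escape sequence present
        }
        Some((prefix_len, prefix_len + body.len()))
    }

    fn main() {
        assert_eq!(escape_range(r"'\x65'"), Some((1, 5)));  // '\x65'  -> highlights \x65
        assert_eq!(escape_range(r"b'\xFF'"), Some((2, 6))); // b'\xFF' -> highlights \xFF
        assert_eq!(escape_range("'a'"), None);              // plain char, nothing to do
    }
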
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
index 3c40246a6..8e96bfa01 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs
@@ -269,7 +269,26 @@ fn highlight_name_ref(
h
}
- NameRefClass::FieldShorthand { .. } => SymbolKind::Field.into(),
+ NameRefClass::FieldShorthand { field_ref, .. } => {
+ highlight_def(sema, krate, field_ref.into())
+ }
+ NameRefClass::ExternCrateShorthand { decl, krate: resolved_krate } => {
+ let mut h = HlTag::Symbol(SymbolKind::Module).into();
+
+ if resolved_krate != krate {
+ h |= HlMod::Library
+ }
+ let is_public = decl.visibility(db) == hir::Visibility::Public;
+ if is_public {
+ h |= HlMod::Public
+ }
+ let is_from_builtin_crate = resolved_krate.is_builtin(db);
+ if is_from_builtin_crate {
+ h |= HlMod::DefaultLibrary;
+ }
+ h |= HlMod::CrateRoot;
+ h
+ }
};
h.tag = match name_ref.token_kind() {
@@ -474,6 +493,14 @@ fn highlight_def(
}
h
}
+ Definition::ExternCrateDecl(extern_crate) => {
+ let mut highlight =
+ Highlight::new(HlTag::Symbol(SymbolKind::Module)) | HlMod::CrateRoot;
+ if extern_crate.alias(db).is_none() {
+ highlight |= HlMod::Library;
+ }
+ highlight
+ }
Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)),
Definition::BuiltinAttr(_) => Highlight::new(HlTag::Symbol(SymbolKind::BuiltinAttr)),
Definition::ToolModule(_) => Highlight::new(HlTag::Symbol(SymbolKind::ToolModule)),
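
Both new arms above build their result by OR-ing highlight modifiers onto a base symbol tag (`h |= HlMod::Library`, `h |= HlMod::CrateRoot`, and so on). A toy bit-flag version of that accumulation pattern, with made-up constants rather than the real `Highlight`/`HlMod` types:

    // Illustrative only: a minimal Highlight with bit-flag modifiers.
    #[derive(Clone, Copy, Debug)]
    struct Highlight {
        tag: &'static str,
        mods: u32,
    }

    const LIBRARY: u32 = 1 << 0;
    const PUBLIC: u32 = 1 << 1;
    const DEFAULT_LIBRARY: u32 = 1 << 2;
    const CRATE_ROOT: u32 = 1 << 3;

    impl std::ops::BitOrAssign<u32> for Highlight {
        fn bitor_assign(&mut self, m: u32) {
            self.mods |= m;
        }
    }

    fn highlight_extern_crate(is_local: bool, is_public: bool, is_builtin: bool) -> Highlight {
        let mut h = Highlight { tag: "module", mods: 0 };
        if !is_local {
            h |= LIBRARY; // resolved crate lives outside the current workspace
        }
        if is_public {
            h |= PUBLIC;
        }
        if is_builtin {
            h |= DEFAULT_LIBRARY; // e.g. std, core, alloc
        }
        h |= CRATE_ROOT; // an extern crate name always points at a crate root
        h
    }

    fn main() {
        let h = highlight_extern_crate(false, true, true);
        assert_eq!(h.mods, LIBRARY | PUBLIC | DEFAULT_LIBRARY | CRATE_ROOT);
        println!("{} -> {:#06b}", h.tag, h.mods);
    }
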
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
index 2c7823069..bbc6b55a6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -109,6 +109,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
";
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
index 901df147d..2657a6414 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
@@ -288,7 +288,7 @@ fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::Stri
fn module_def_to_hl_tag(def: Definition) -> HlTag {
let symbol = match def {
- Definition::Module(_) => SymbolKind::Module,
+ Definition::Module(_) | Definition::ExternCrateDecl(_) => SymbolKind::Module,
Definition::Function(_) => SymbolKind::Function,
Definition::Adt(hir::Adt::Struct(_)) => SymbolKind::Struct,
Definition::Adt(hir::Adt::Enum(_)) => SymbolKind::Enum,
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
index f98310911..6d4cdd0ef 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
@@ -29,6 +29,7 @@ pub enum HlTag {
Comment,
EscapeSequence,
FormatSpecifier,
+ InvalidEscapeSequence,
Keyword,
NumericLiteral,
Operator(HlOperator),
@@ -166,6 +167,7 @@ impl HlTag {
HlTag::CharLiteral => "char_literal",
HlTag::Comment => "comment",
HlTag::EscapeSequence => "escape_sequence",
+ HlTag::InvalidEscapeSequence => "invalid_escape_sequence",
HlTag::FormatSpecifier => "format_specifier",
HlTag::Keyword => "keyword",
HlTag::Punctuation(punct) => match punct {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
index 9ed65fbc8..4dcbfe4eb 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">not_static</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
index 567ab8ccc..bf5505caf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">allow</span><span class="parenthesis attribute">(</span><span class="none attribute">dead_code</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="tool_module attribute library">rustfmt</span><span class="operator attribute">::</span><span class="tool_module attribute library">skip</span><span class="attribute_bracket attribute">]</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
index 1e4c06df7..0d1b3c1f1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root library">foo</span><span class="semicolon">;</span>
<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
index 5d66f832d..dd1528ed0 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">iter</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 35f240d42..d5f92aa5d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
<span class="comment documentation">//! ```</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
index 87b9da46e..88a008796 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
@@ -40,8 +40,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">std</span><span class="semicolon">;</span>
-<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">alloc</span> <span class="keyword">as</span> <span class="module crate_root default_library declaration library">abc</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root default_library library">alloc</span> <span class="keyword">as</span> <span class="module crate_root declaration">abc</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="unresolved_reference">unresolved</span> <span class="keyword">as</span> <span class="module crate_root declaration">definitely_unresolved</span><span class="semicolon">;</span>
</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index 6b049f379..bdeb09d2f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">use</span> <span class="module">inner</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="module declaration">inner_mod</span><span class="brace">}</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span><span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
index d9c3db6fb..f9c33b8a6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">fixture</span><span class="parenthesis">(</span><span class="value_param declaration reference">ra_fixture</span><span class="colon">:</span> <span class="punctuation">&</span><span class="builtin_type">str</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
index 3900959be..2043752bc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_keywords.html
@@ -40,9 +40,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
-<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
+<pre><code><span class="keyword">extern</span> <span class="keyword">crate</span> <span class="self_keyword crate_root">self</span><span class="semicolon">;</span>
<span class="keyword">use</span> <span class="keyword crate_root public">crate</span><span class="semicolon">;</span>
<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
index f98e0b1cd..ec39998de 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index 2cbbf6964..06b66b302 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="module crate_root library">proc_macros</span><span class="operator">::</span><span class="macro library">mirror</span><span class="macro_bang">!</span> <span class="brace macro">{</span>
<span class="brace macro">{</span>
@@ -89,8 +90,18 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">concat</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">include</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">rustc_builtin_macro</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">format_args</span> <span class="brace">{</span><span class="brace">}</span>
+
+<span class="macro">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
- <span class="unresolved_reference">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, {}!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">noop</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
-<span class="brace">}</span></code></pre> \ No newline at end of file
+<span class="brace">}</span>
+</code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
index 8a1d69816..4dcf8e5f0 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="comment documentation">//! </span><span class="struct documentation injected intra_doc_link">[Struct]</span>
<span class="comment documentation">//! This is an intra doc injection test for modules</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
index c4c3e3dc2..084bbf2f7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[crate::foo::Struct]</span>
<span class="comment documentation">/// This is an intra doc injection test for modules</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
index 2369071ae..1af4bcfbd 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="numeric_literal">1</span> <span class="arithmetic">+</span> <span class="numeric_literal">1</span> <span class="arithmetic">-</span> <span class="numeric_literal">1</span> <span class="arithmetic">*</span> <span class="numeric_literal">1</span> <span class="arithmetic">/</span> <span class="numeric_literal">1</span> <span class="arithmetic">%</span> <span class="numeric_literal">1</span> <span class="bitwise">|</span> <span class="numeric_literal">1</span> <span class="bitwise">&</span> <span class="numeric_literal">1</span> <span class="logical">!</span> <span class="numeric_literal">1</span> <span class="bitwise">^</span> <span class="numeric_literal">1</span> <span class="bitwise">&gt;&gt;</span> <span class="numeric_literal">1</span> <span class="bitwise">&lt;&lt;</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
index bff35c897..ec18c3ea1 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="8121853618659664005" style="color: hsl(273,88%,88%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index fa374b04f..3ac8aa9cc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">println</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>arg<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="parenthesis">(</span><span class="brace">{</span>
@@ -105,6 +106,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\x65</span><span class="char_literal">'</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="char_literal">'</span><span class="escape_sequence">\x00</span><span class="char_literal">'</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="variable declaration">a</span> <span class="operator">=</span> <span class="byte_literal">b'</span><span class="escape_sequence">\xFF</span><span class="byte_literal">'</span><span class="semicolon">;</span>
+
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="escape_sequence">{{</span><span class="string_literal macro">Hello</span><span class="escape_sequence">}}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="comment">// from https://doc.rust-lang.org/std/fmt/index.html</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="comment">// =&gt; "Hello"</span>
@@ -159,9 +162,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello</span><span class="escape_sequence">\n</span><span class="string_literal macro">World"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="escape_sequence">\u{48}</span><span class="escape_sequence">\x65</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6F</span><span class="string_literal macro"> World"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
- <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">b"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
- <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="invalid_escape_sequence">\xFF</span><span class="escape_sequence">\u{FF}</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span> <span class="comment">// invalid non-UTF8 escape sequences</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">b"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\xFF</span><span class="invalid_escape_sequence">\u{FF}</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span> <span class="comment">// valid bytes, invalid unicodes</span>
+ <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">c"</span><span class="escape_sequence">\u{FF}</span><span class="escape_sequence">\xFF</span><span class="string_literal">"</span><span class="semicolon">;</span> <span class="comment">// valid bytes, valid unicodes</span>
+ <span class="keyword">let</span> <span class="variable declaration reference">backslash</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">A</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">ничоси</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
@@ -173,6 +177,6 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> asdasd"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"mov eax, </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
- <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="operator macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+ <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="unresolved_reference macro">foo</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="none macro">toho</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="variable macro reference">backslash</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre> \ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
index 654d51b8a..c72ea54e9 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -40,7 +40,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.control { font-style: italic; }
.reference { font-style: italic; font-weight: bold; }
-.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">id</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="punctuation">$</span><span class="parenthesis">(</span><span class="punctuation">$</span>tt<span class="colon">:</span>tt<span class="parenthesis">)</span><span class="punctuation">*</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
index 497992f68..8749d355c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -48,6 +48,7 @@ fn macros() {
check_highlighting(
r#"
//- proc_macros: mirror
+//- /lib.rs crate:lib
proc_macros::mirror! {
{
,i32 :x pub
@@ -95,11 +96,23 @@ macro without_args {
}
}
+#[rustc_builtin_macro]
+macro_rules! concat {}
+#[rustc_builtin_macro]
+macro_rules! include {}
+#[rustc_builtin_macro]
+macro_rules! format_args {}
+
+include!(concat!("foo/", "foo.rs"));
+
fn main() {
- println!("Hello, {}!", 92);
+ format_args!("Hello, {}!", 92);
dont_color_me_braces!();
noop!(noop!(1));
}
+//- /foo/foo.rs crate:foo
+mod foo {}
+use self::foo as bar;
"#,
expect_file!["./test_data/highlight_macros.html"],
false,
@@ -451,6 +464,8 @@ fn main() {
let a = '\x65';
let a = '\x00';
+ let a = b'\xFF';
+
println!("Hello {{Hello}}");
// from https://doc.rust-lang.org/std/fmt/index.html
println!("Hello"); // => "Hello"
@@ -505,9 +520,10 @@ fn main() {
println!("Hello\nWorld");
println!("\u{48}\x65\x6C\x6C\x6F World");
- let _ = "\x28\x28\x00\x63\n";
- let _ = b"\x28\x28\x00\x63\n";
- let _ = r"\\";
+ let _ = "\x28\x28\x00\x63\xFF\u{FF}\n"; // invalid non-UTF8 escape sequences
+ let _ = b"\x28\x28\x00\x63\xFF\u{FF}\n"; // valid bytes, invalid unicodes
+ let _ = c"\u{FF}\xFF"; // valid bytes, valid unicodes
+ let backslash = r"\\";
println!("{\x41}", A = 92);
println!("{ничоси}", ничоси = 92);
@@ -520,7 +536,7 @@ fn main() {
toho!("{}fmt", 0);
asm!("mov eax, {0}");
format_args!(concat!("{}"), "{}");
- format_args!("{}", format_args!("{}", 0));
+ format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
}"#,
expect_file!["./test_data/highlight_strings.html"],
false,
@@ -788,6 +804,7 @@ fn test_extern_crate() {
//- /main.rs crate:main deps:std,alloc
extern crate std;
extern crate alloc as abc;
+extern crate unresolved as definitely_unresolved;
//- /std/lib.rs crate:std
pub struct S;
//- /alloc/lib.rs crate:alloc
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
new file mode 100644
index 000000000..2f6332abd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs
@@ -0,0 +1,409 @@
+use std::fmt;
+
+use hir::{Field, HirDisplay, Layout, Semantics, Type};
+use ide_db::{
+ defs::Definition,
+ helpers::{get_definition, pick_best_token},
+ RootDatabase,
+};
+use syntax::{AstNode, SyntaxKind};
+
+use crate::FilePosition;
+
+pub struct MemoryLayoutNode {
+ pub item_name: String,
+ pub typename: String,
+ pub size: u64,
+ pub alignment: u64,
+ pub offset: u64,
+ pub parent_idx: i64,
+ pub children_start: i64,
+ pub children_len: u64,
+}
+
+pub struct RecursiveMemoryLayout {
+ pub nodes: Vec<MemoryLayoutNode>,
+}
+
+// NOTE: this is currently used strictly for testing and so isn't very useful as a visualization tool; however, it could be adapted to become one.
+impl fmt::Display for RecursiveMemoryLayout {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fn process(
+ fmt: &mut fmt::Formatter<'_>,
+ nodes: &Vec<MemoryLayoutNode>,
+ idx: usize,
+ depth: usize,
+ ) -> fmt::Result {
+ let mut out = "\t".repeat(depth);
+ let node = &nodes[idx];
+ out += &format!(
+ "{}: {} (size: {}, align: {}, field offset: {})\n",
+ node.item_name, node.typename, node.size, node.alignment, node.offset
+ );
+ write!(fmt, "{}", out)?;
+ if node.children_start != -1 {
+ for j in nodes[idx].children_start
+ ..(nodes[idx].children_start + nodes[idx].children_len as i64)
+ {
+ process(fmt, nodes, j as usize, depth + 1)?;
+ }
+ }
+ Ok(())
+ }
+
+ process(fmt, &self.nodes, 0, 0)
+ }
+}
+
+enum FieldOrTupleIdx {
+ Field(Field),
+ TupleIdx(usize),
+}
+
+impl FieldOrTupleIdx {
+ fn name(&self, db: &RootDatabase) -> String {
+ match *self {
+ FieldOrTupleIdx::Field(f) => f
+ .name(db)
+ .as_str()
+ .map(|s| s.to_owned())
+ .unwrap_or_else(|| format!(".{}", f.name(db).as_tuple_index().unwrap())),
+ FieldOrTupleIdx::TupleIdx(i) => format!(".{i}").to_owned(),
+ }
+ }
+
+ fn index(&self) -> usize {
+ match *self {
+ FieldOrTupleIdx::Field(f) => f.index(),
+ FieldOrTupleIdx::TupleIdx(i) => i,
+ }
+ }
+}
+
+// Feature: View Memory Layout
+//
+// Displays the recursive memory layout of a datatype.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **rust-analyzer: View Memory Layout**
+// |===
+pub(crate) fn view_memory_layout(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RecursiveMemoryLayout> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(position.file_id);
+ let token =
+ pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+ SyntaxKind::IDENT => 3,
+ _ => 0,
+ })?;
+
+ let def = get_definition(&sema, token)?;
+
+ let ty = match def {
+ Definition::Adt(it) => it.ty(db),
+ Definition::TypeAlias(it) => it.ty(db),
+ Definition::BuiltinType(it) => it.ty(db),
+ Definition::SelfType(it) => it.self_ty(db),
+ Definition::Local(it) => it.ty(db),
+ Definition::Field(it) => it.ty(db),
+ Definition::Const(it) => it.ty(db),
+ Definition::Static(it) => it.ty(db),
+ _ => return None,
+ };
+
+ fn read_layout(
+ nodes: &mut Vec<MemoryLayoutNode>,
+ db: &RootDatabase,
+ ty: &Type,
+ layout: &Layout,
+ parent_idx: usize,
+ ) {
+ let mut fields = ty
+ .fields(db)
+ .into_iter()
+ .map(|(f, ty)| (FieldOrTupleIdx::Field(f), ty))
+ .chain(
+ ty.tuple_fields(db)
+ .into_iter()
+ .enumerate()
+ .map(|(i, ty)| (FieldOrTupleIdx::TupleIdx(i), ty)),
+ )
+ .collect::<Vec<_>>();
+
+ if fields.is_empty() {
+ return;
+ }
+
+ fields.sort_by_key(|(f, _)| layout.field_offset(f.index()).unwrap());
+
+ let children_start = nodes.len();
+ nodes[parent_idx].children_start = children_start as i64;
+ nodes[parent_idx].children_len = fields.len() as u64;
+
+ for (field, child_ty) in fields.iter() {
+ if let Ok(child_layout) = child_ty.layout(db) {
+ nodes.push(MemoryLayoutNode {
+ item_name: field.name(db),
+ typename: child_ty.display(db).to_string(),
+ size: child_layout.size(),
+ alignment: child_layout.align(),
+ offset: layout.field_offset(field.index()).unwrap_or(0),
+ parent_idx: parent_idx as i64,
+ children_start: -1,
+ children_len: 0,
+ });
+ } else {
+ nodes.push(MemoryLayoutNode {
+ item_name: field.name(db)
+ + format!("(no layout data: {:?})", child_ty.layout(db).unwrap_err())
+ .as_ref(),
+ typename: child_ty.display(db).to_string(),
+ size: 0,
+ offset: 0,
+ alignment: 0,
+ parent_idx: parent_idx as i64,
+ children_start: -1,
+ children_len: 0,
+ });
+ }
+ }
+
+ for (i, (_, child_ty)) in fields.iter().enumerate() {
+ if let Ok(child_layout) = child_ty.layout(db) {
+ read_layout(nodes, db, &child_ty, &child_layout, children_start + i);
+ }
+ }
+ }
+
+ ty.layout(db)
+ .map(|layout| {
+ let item_name = match def {
+ // def is a datatype
+ Definition::Adt(_)
+ | Definition::TypeAlias(_)
+ | Definition::BuiltinType(_)
+ | Definition::SelfType(_) => "[ROOT]".to_owned(),
+
+ // def is an item
+ def => def
+ .name(db)
+ .map(|n| {
+ n.as_str()
+ .map(|s| s.to_owned())
+ .unwrap_or_else(|| format!(".{}", n.as_tuple_index().unwrap()))
+ })
+ .unwrap_or("[ROOT]".to_owned()),
+ };
+
+ let typename = ty.display(db).to_string();
+
+ let mut nodes = vec![MemoryLayoutNode {
+ item_name,
+ typename: typename.clone(),
+ size: layout.size(),
+ offset: 0,
+ alignment: layout.align(),
+ parent_idx: -1,
+ children_start: -1,
+ children_len: 0,
+ }];
+ read_layout(&mut nodes, db, &ty, &layout, 0);
+
+ RecursiveMemoryLayout { nodes }
+ })
+ .ok()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::fixture;
+ use expect_test::expect;
+
+ fn make_memory_layout(ra_fixture: &str) -> Option<RecursiveMemoryLayout> {
+ let (analysis, position, _) = fixture::annotations(ra_fixture);
+
+ view_memory_layout(&analysis.db, position)
+ }
+
+ #[test]
+ fn view_memory_layout_none() {
+ assert!(make_memory_layout(r#"$0"#).is_none());
+ assert!(make_memory_layout(r#"stru$0ct Blah {}"#).is_none());
+ }
+
+ #[test]
+ fn view_memory_layout_primitive() {
+ expect![[r#"
+ foo: i32 (size: 4, align: 4, field offset: 0)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+fn main() {
+ let foo$0 = 109; // default i32
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_constant() {
+ expect![[r#"
+ BLAH: bool (size: 1, align: 1, field offset: 0)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+const BLAH$0: bool = 0;
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_static() {
+ expect![[r#"
+ BLAH: bool (size: 1, align: 1, field offset: 0)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+static BLAH$0: bool = 0;
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_tuple() {
+ expect![[r#"
+ x: (f64, u8, i64) (size: 24, align: 8, field offset: 0)
+ .0: f64 (size: 8, align: 8, field offset: 0)
+ .1: u8 (size: 1, align: 1, field offset: 8)
+ .2: i64 (size: 8, align: 8, field offset: 16)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+fn main() {
+ let x$0 = (101.0, 111u8, 119i64);
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_c_struct() {
+ expect![[r#"
+ [ROOT]: Blah (size: 16, align: 4, field offset: 0)
+ a: u32 (size: 4, align: 4, field offset: 0)
+ b: (i32, u8) (size: 8, align: 4, field offset: 4)
+ .0: i32 (size: 4, align: 4, field offset: 0)
+ .1: u8 (size: 1, align: 1, field offset: 4)
+ c: i8 (size: 1, align: 1, field offset: 12)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+#[repr(C)]
+struct Blah$0 {
+ a: u32,
+ b: (i32, u8),
+ c: i8,
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_struct() {
+ expect![[r#"
+ [ROOT]: Blah (size: 16, align: 4, field offset: 0)
+ b: (i32, u8) (size: 8, align: 4, field offset: 0)
+ .0: i32 (size: 4, align: 4, field offset: 0)
+ .1: u8 (size: 1, align: 1, field offset: 4)
+ a: u32 (size: 4, align: 4, field offset: 8)
+ c: i8 (size: 1, align: 1, field offset: 12)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+struct Blah$0 {
+ a: u32,
+ b: (i32, u8),
+ c: i8,
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_member() {
+ expect![[r#"
+ a: bool (size: 1, align: 1, field offset: 0)
+ "#]]
+ .assert_eq(
+ &make_memory_layout(
+ r#"
+#[repr(C)]
+struct Oof {
+ a$0: bool,
+}
+"#,
+ )
+ .unwrap()
+ .to_string(),
+ );
+ }
+
+ #[test]
+ fn view_memory_layout_alias() {
+ let ml_a = make_memory_layout(
+ r#"
+struct X {
+ a: u32,
+ b: i8,
+ c: (f32, f32),
+}
+
+type Foo$0 = X;
+"#,
+ )
+ .unwrap();
+
+ let ml_b = make_memory_layout(
+ r#"
+struct X$0 {
+ a: u32,
+ b: i8,
+ c: (f32, f32),
+}
+"#,
+ )
+ .unwrap();
+
+ assert_eq!(ml_a.to_string(), ml_b.to_string());
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml
index dcd0d7881..4d56c7719 100644
--- a/src/tools/rust-analyzer/crates/intern/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml
@@ -15,7 +15,7 @@ doctest = false
[dependencies]
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.4.0", features = ["raw-api"] }
-hashbrown = { version = "0.12.1", default-features = false }
+hashbrown.workspace = true
once_cell = "1.17.0"
rustc-hash = "1.1.0"
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
new file mode 100644
index 000000000..f041ca88a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml
@@ -0,0 +1,25 @@
+[package]
+name = "load-cargo"
+version = "0.0.0"
+description = "TBD"
+
+rust-version.workspace = true
+edition.workspace = true
+license.workspace = true
+authors.workspace = true
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow = "1.0.62"
+crossbeam-channel = "0.5.5"
+itertools = "0.10.5"
+tracing = "0.1.35"
+
+ide.workspace = true
+ide-db.workspace = true
+proc-macro-api.workspace = true
+project-model.workspace = true
+tt.workspace = true
+vfs.workspace = true
+vfs-notify.workspace = true
diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
new file mode 100644
index 000000000..7a795dd62
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -0,0 +1,441 @@
+//! Loads a Cargo project into a static instance of analysis, without support
+//! for incorporating changes.
+// Note: don't remove any public API from this. This API is consumed by external tools
+// to run rust-analyzer as a library.
+use std::{collections::hash_map::Entry, mem, path::Path, sync};
+
+use ::tt::token_id as tt;
+use crossbeam_channel::{unbounded, Receiver};
+use ide::{AnalysisHost, Change, SourceRoot};
+use ide_db::{
+ base_db::{
+ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+ ProcMacroLoadResult, ProcMacros,
+ },
+ FxHashMap,
+};
+use itertools::Itertools;
+use proc_macro_api::{MacroDylib, ProcMacroServer};
+use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
+use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath};
+
+pub struct LoadCargoConfig {
+ pub load_out_dirs_from_check: bool,
+ pub with_proc_macro_server: ProcMacroServerChoice,
+ pub prefill_caches: bool,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ProcMacroServerChoice {
+ Sysroot,
+ Explicit(AbsPathBuf),
+ None,
+}
+
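+// A minimal usage sketch (the project path and config values below are
+// assumptions for illustration, not part of this crate's API):
+//
+//     let cargo_config = CargoConfig::default();
+//     let load_config = LoadCargoConfig {
+//         load_out_dirs_from_check: true,
+//         with_proc_macro_server: ProcMacroServerChoice::None,
+//         prefill_caches: false,
+//     };
+//     let (host, vfs, _server) = load_workspace_at(
+//         Path::new("./my-crate"),
+//         &cargo_config,
+//         &load_config,
+//         &|msg| eprintln!("{msg}"),
+//     )?;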
+pub fn load_workspace_at(
+ root: &Path,
+ cargo_config: &CargoConfig,
+ load_config: &LoadCargoConfig,
+ progress: &dyn Fn(String),
+) -> anyhow::Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
+ let root = ProjectManifest::discover_single(&root)?;
+ let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
+
+ if load_config.load_out_dirs_from_check {
+ let build_scripts = workspace.run_build_scripts(cargo_config, progress)?;
+ workspace.set_build_scripts(build_scripts)
+ }
+
+ load_workspace(workspace, &cargo_config.extra_env, load_config)
+}
+
+pub fn load_workspace(
+ ws: ProjectWorkspace,
+ extra_env: &FxHashMap<String, String>,
+ load_config: &LoadCargoConfig,
+) -> anyhow::Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let (sender, receiver) = unbounded();
+ let mut vfs = vfs::Vfs::default();
+ let mut loader = {
+ let loader =
+ vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+ Box::new(loader)
+ };
+
+ let proc_macro_server = match &load_config.with_proc_macro_server {
+ ProcMacroServerChoice::Sysroot => ws
+ .find_sysroot_proc_macro_srv()
+ .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)),
+ ProcMacroServerChoice::Explicit(path) => {
+ ProcMacroServer::spawn(path.clone()).map_err(Into::into)
+ }
+ ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")),
+ };
+
+ let (crate_graph, proc_macros) = ws.to_crate_graph(
+ &mut |path: &AbsPath| {
+ let contents = loader.load_sync(path);
+ let path = vfs::VfsPath::from(path.to_path_buf());
+ vfs.set_file_contents(path.clone(), contents);
+ vfs.file_id(&path)
+ },
+ extra_env,
+ );
+ let proc_macros = {
+ let proc_macro_server = match &proc_macro_server {
+ Ok(it) => Ok(it),
+ Err(e) => Err(e.to_string()),
+ };
+ proc_macros
+ .into_iter()
+ .map(|(crate_id, path)| {
+ (
+ crate_id,
+ path.map_or_else(
+ |_| Err("proc macro crate is missing dylib".to_owned()),
+ |(_, path)| {
+ proc_macro_server.as_ref().map_err(Clone::clone).and_then(
+ |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
+ )
+ },
+ ),
+ )
+ })
+ .collect()
+ };
+
+ let project_folders = ProjectFolders::new(&[ws], &[]);
+ loader.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch: vec![],
+ version: 0,
+ });
+
+ let host = load_crate_graph(
+ crate_graph,
+ proc_macros,
+ project_folders.source_root_config,
+ &mut vfs,
+ &receiver,
+ );
+
+ if load_config.prefill_caches {
+ host.analysis().parallel_prime_caches(1, |_| {})?;
+ }
+ Ok((host, vfs, proc_macro_server.ok()))
+}
+
+#[derive(Default)]
+pub struct ProjectFolders {
+ pub load: Vec<vfs::loader::Entry>,
+ pub watch: Vec<usize>,
+ pub source_root_config: SourceRootConfig,
+}
+
+impl ProjectFolders {
+ pub fn new(workspaces: &[ProjectWorkspace], global_excludes: &[AbsPathBuf]) -> ProjectFolders {
+ let mut res = ProjectFolders::default();
+ let mut fsc = FileSetConfig::builder();
+ let mut local_filesets = vec![];
+
+ // Dedup source roots
+ // Depending on the project setup, we can have duplicated source roots, or for example in
+ // the case of the rustc workspace, we can end up with two source roots that are almost the
+ // same but not quite, like:
+ // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] }
+ // PackageRoot {
+ // is_local: true,
+ // include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")],
+ // exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")]
+ // }
+ //
+ // The first one comes from the explicit rustc workspace, which points to the rustc workspace itself.
+ // The second comes from the rustc workspace that we load as the actual project workspace.
+ // Having `is_local` differ in this way causes problems, especially when filtering diagnostics, as we don't report diagnostics for external libraries.
+ // So we need to deduplicate these roots. Usually deduplicating by `include` would be enough, but as the rustc example shows, that doesn't work,
+ // so we also need to coalesce the includes when they overlap.
+
+ let mut roots: Vec<_> = workspaces
+ .iter()
+ .flat_map(|ws| ws.to_roots())
+ .update(|root| root.include.sort())
+ .sorted_by(|a, b| a.include.cmp(&b.include))
+ .collect();
+
+ // map that tracks indices of overlapping roots
+ let mut overlap_map = FxHashMap::<_, Vec<_>>::default();
+ let mut done = false;
+
+ while !mem::replace(&mut done, true) {
+ // maps include paths to indices of the corresponding root
+ let mut include_to_idx = FxHashMap::default();
+ // Find and note down the indices of overlapping roots
+ for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) {
+ for include in &root.include {
+ match include_to_idx.entry(include) {
+ Entry::Occupied(e) => {
+ overlap_map.entry(*e.get()).or_default().push(idx);
+ }
+ Entry::Vacant(e) => {
+ e.insert(idx);
+ }
+ }
+ }
+ }
+ for (k, v) in overlap_map.drain() {
+ done = false;
+ for v in v {
+ let r = mem::replace(
+ &mut roots[v],
+ PackageRoot { is_local: false, include: vec![], exclude: vec![] },
+ );
+ roots[k].is_local |= r.is_local;
+ roots[k].include.extend(r.include);
+ roots[k].exclude.extend(r.exclude);
+ }
+ roots[k].include.sort();
+ roots[k].exclude.sort();
+ roots[k].include.dedup();
+ roots[k].exclude.dedup();
+ }
+ }
+
+ for root in roots.into_iter().filter(|it| !it.include.is_empty()) {
+ let file_set_roots: Vec<VfsPath> =
+ root.include.iter().cloned().map(VfsPath::from).collect();
+
+ let entry = {
+ let mut dirs = vfs::loader::Directories::default();
+ dirs.extensions.push("rs".into());
+ dirs.include.extend(root.include);
+ dirs.exclude.extend(root.exclude);
+ for excl in global_excludes {
+ if dirs
+ .include
+ .iter()
+ .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
+ {
+ dirs.exclude.push(excl.clone());
+ }
+ }
+
+ vfs::loader::Entry::Directories(dirs)
+ };
+
+ if root.is_local {
+ res.watch.push(res.load.len());
+ }
+ res.load.push(entry);
+
+ if root.is_local {
+ local_filesets.push(fsc.len());
+ }
+ fsc.add_file_set(file_set_roots)
+ }
+
+ let fsc = fsc.build();
+ res.source_root_config = SourceRootConfig { fsc, local_filesets };
+
+ res
+ }
+}
+
+#[derive(Default, Debug)]
+pub struct SourceRootConfig {
+ pub fsc: FileSetConfig,
+ pub local_filesets: Vec<usize>,
+}
+
+impl SourceRootConfig {
+ pub fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
+ self.fsc
+ .partition(vfs)
+ .into_iter()
+ .enumerate()
+ .map(|(idx, file_set)| {
+ let is_local = self.local_filesets.contains(&idx);
+ if is_local {
+ SourceRoot::new_local(file_set)
+ } else {
+ SourceRoot::new_library(file_set)
+ }
+ })
+ .collect()
+ }
+}
+
+/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
+/// with a dummy expander (an identity expander for attribute macros, an empty expander otherwise).
+pub fn load_proc_macro(
+ server: &ProcMacroServer,
+ path: &AbsPath,
+ dummy_replace: &[Box<str>],
+) -> ProcMacroLoadResult {
+ let res: Result<Vec<_>, String> = (|| {
+ let dylib = MacroDylib::new(path.to_path_buf());
+ let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ if vec.is_empty() {
+ return Err("proc macro library returned no proc macros".to_string());
+ }
+ Ok(vec
+ .into_iter()
+ .map(|expander| expander_to_proc_macro(expander, dummy_replace))
+ .collect())
+ })();
+ match res {
+ Ok(proc_macros) => {
+ tracing::info!(
+ "Loaded proc-macros for {path}: {:?}",
+ proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
+ );
+ Ok(proc_macros)
+ }
+ Err(e) => {
+ tracing::warn!("proc-macro loading for {path} failed: {e}");
+ Err(e)
+ }
+ }
+}
+
+fn load_crate_graph(
+ crate_graph: CrateGraph,
+ proc_macros: ProcMacros,
+ source_root_config: SourceRootConfig,
+ vfs: &mut vfs::Vfs,
+ receiver: &Receiver<vfs::loader::Message>,
+) -> AnalysisHost {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+ let mut host = AnalysisHost::new(lru_cap);
+ let mut analysis_change = Change::new();
+
+ host.raw_database_mut().enable_proc_attr_macros();
+
+ // wait until Vfs has loaded all roots
+ for task in receiver {
+ match task {
+ vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => {
+ if n_done == n_total {
+ break;
+ }
+ }
+ vfs::loader::Message::Loaded { files } => {
+ for (path, contents) in files {
+ vfs.set_file_contents(path.into(), contents);
+ }
+ }
+ }
+ }
+ let changes = vfs.take_changes();
+ for file in changes {
+ if file.exists() {
+ let contents = vfs.file_contents(file.file_id);
+ if let Ok(text) = std::str::from_utf8(contents) {
+ analysis_change.change_file(file.file_id, Some(text.into()))
+ }
+ }
+ }
+ let source_roots = source_root_config.partition(vfs);
+ analysis_change.set_roots(source_roots);
+
+ analysis_change.set_crate_graph(crate_graph);
+ analysis_change.set_proc_macros(proc_macros);
+
+ host.apply_change(analysis_change);
+ host
+}
+
+fn expander_to_proc_macro(
+ expander: proc_macro_api::ProcMacro,
+ dummy_replace: &[Box<str>],
+) -> ProcMacro {
+ let name = From::from(expander.name());
+ let kind = match expander.kind() {
+ proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
+ proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
+ proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
+ };
+ let expander: sync::Arc<dyn ProcMacroExpander> =
+ if dummy_replace.iter().any(|replace| &**replace == name) {
+ match kind {
+ ProcMacroKind::Attr => sync::Arc::new(IdentityExpander),
+ _ => sync::Arc::new(EmptyExpander),
+ }
+ } else {
+ sync::Arc::new(Expander(expander))
+ };
+ ProcMacro { name, kind, expander }
+}
+
+#[derive(Debug)]
+struct Expander(proc_macro_api::ProcMacro);
+
+impl ProcMacroExpander for Expander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ attrs: Option<&tt::Subtree>,
+ env: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+ match self.0.expand(subtree, attrs, env) {
+ Ok(Ok(subtree)) => Ok(subtree),
+ Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+ Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+ }
+ }
+}
+
+/// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
+#[derive(Debug)]
+struct IdentityExpander;
+
+impl ProcMacroExpander for IdentityExpander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(subtree.clone())
+ }
+}
+
+/// Empty expander, used for proc-macros that are deliberately ignored by the user.
+#[derive(Debug)]
+struct EmptyExpander;
+
+impl ProcMacroExpander for EmptyExpander {
+ fn expand(
+ &self,
+ _: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(tt::Subtree::empty())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::SourceDatabase;
+
+ use super::*;
+
+ #[test]
+ fn test_loading_rust_analyzer() {
+ let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: false,
+ with_proc_macro_server: ProcMacroServerChoice::None,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
+
+ let n_crates = host.raw_database().crate_graph().iter().count();
+ // RA has quite a few crates, but the exact count doesn't matter
+ assert!(n_crates > 20);
+ }
+}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index d28dd17de..9d43e1304 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -20,10 +20,7 @@ fn benchmark_parse_macro_rules() {
let rules = macro_rules_fixtures_tt();
let hash: usize = {
let _pt = bench("mbe parse macro rules");
- rules
- .values()
- .map(|it| DeclarativeMacro::parse_macro_rules(it, true).unwrap().rules.len())
- .sum()
+ rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it, true).rules.len()).sum()
};
assert_eq!(hash, 1144);
}
@@ -41,7 +38,7 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
- let res = rules[&id].expand(&tt);
+ let res = rules[&id].expand(tt);
assert!(res.err.is_none());
res.value.token_trees.len()
})
@@ -53,7 +50,7 @@ fn benchmark_expand_macro_rules() {
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
macro_rules_fixtures_tt()
.into_iter()
- .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true).unwrap()))
+ .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect()
}
@@ -105,7 +102,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
for op in rule.lhs.iter() {
collect_from_op(op, &mut subtree, &mut seed);
}
- if it.expand(&subtree).err.is_none() {
+ if it.expand(subtree.clone()).err.is_none() {
res.push((name.clone(), subtree));
break;
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index 8e2181e97..f2d89d3ef 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -123,4 +123,14 @@ enum Fragment {
/// proc-macro delimiter=none. As we later discovered, "none" delimiters are
/// tricky to handle in the parser, and rustc doesn't handle those either.
Expr(tt::TokenTree),
+ /// There are roughly two types of paths: paths in expression context, where a
+ /// separator `::` between an identifier and its following generic argument list
+ /// is mandatory, and paths in type context, where `::` can be omitted.
+ ///
+ /// Unlike rustc, we need to transform the parsed fragments back into tokens
+ /// during transcription. When the matched path fragment is a type-context path
+ /// and is transcribed as an expression-context path, verbatim transcription
+ /// would cause a syntax error. We need to fix it up just before transcribing;
+ /// see `transcriber::fix_up_and_push_path_tt()`.
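+ /// For example, a `$p:path` fragment that matched the type-context path
+ /// `Vec<u8>` has to come out as `Vec::<u8>` when transcribed into expression
+ /// position, otherwise the expansion no longer parses.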
+ Path(tt::TokenTree),
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index 474826079..1471af98b 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -742,7 +742,11 @@ fn match_meta_var(
is_2021: bool,
) -> ExpandResult<Option<Fragment>> {
let fragment = match kind {
- MetaVarKind::Path => parser::PrefixEntryPoint::Path,
+ MetaVarKind::Path => {
+ return input
+ .expect_fragment(parser::PrefixEntryPoint::Path)
+ .map(|it| it.map(Fragment::Path));
+ }
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
MetaVarKind::Pat => parser::PrefixEntryPoint::Pat,
@@ -771,7 +775,7 @@ fn match_meta_var(
.expect_fragment(parser::PrefixEntryPoint::Expr)
.map(|tt| tt.map(Fragment::Expr));
}
- _ => {
+ MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
let tt_result = match kind {
MetaVarKind::Ident => input
.expect_ident()
@@ -799,7 +803,7 @@ fn match_meta_var(
})
.map_err(|()| ExpandError::binding_error("expected literal"))
}
- _ => Err(ExpandError::UnexpectedToken),
+ _ => unreachable!(),
};
return tt_result.map(|it| Some(Fragment::Tokens(it))).into();
}
@@ -884,7 +888,7 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
-impl<'a> TtIter<'a> {
+impl TtIter<'_> {
fn expect_separator(&mut self, separator: &Separator) -> bool {
let mut fork = self.clone();
let ok = match separator {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
index 6161af185..cdac2f1e3 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -400,7 +400,8 @@ fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
}
buf.push(tt.into())
}
- Fragment::Tokens(tt) | Fragment::Expr(tt) => buf.push(tt),
+ Fragment::Path(tt::TokenTree::Subtree(tt)) => fix_up_and_push_path_tt(buf, tt),
+ Fragment::Tokens(tt) | Fragment::Expr(tt) | Fragment::Path(tt) => buf.push(tt),
}
}
@@ -411,6 +412,45 @@ fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
}
}
+/// Inserts the path separator `::` between an identifier and its following generic
+/// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why
+/// we need this fixup.
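+///
+/// E.g. the token sequence `HashMap < u8 , u8 >` captured from type position is
+/// pushed back out as `HashMap :: < u8 , u8 >` so it still parses as an expression.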
+fn fix_up_and_push_path_tt(buf: &mut Vec<tt::TokenTree>, subtree: tt::Subtree) {
+ stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
+ let mut prev_was_ident = false;
+ // Note that we only need to fix up the top-level `TokenTree`s because the
+ // context of the paths in the descendant `Subtree`s won't be changed by the
+ // mbe transcription.
+ for tt in subtree.token_trees {
+ if prev_was_ident {
+ // Pedantically, `(T) -> U` in `FnOnce(T) -> U` is treated as a generic
+ // argument list and thus needs `::` between it and `FnOnce`. However, in
+ // today's Rust this type of path *semantically* cannot appear as a
+ // top-level expression-context path, so we can safely ignore it.
+ if let tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '<', .. })) = tt {
+ buf.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: ':',
+ spacing: tt::Spacing::Joint,
+ span: tt::Span::unspecified(),
+ })
+ .into(),
+ );
+ buf.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: ':',
+ spacing: tt::Spacing::Alone,
+ span: tt::Span::unspecified(),
+ })
+ .into(),
+ );
+ }
+ }
+ prev_was_ident = matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Ident(_)));
+ buf.push(tt);
+ }
+}
+
/// Handles `${count(t, depth)}`. `our_depth` is the recursion depth and `count_depth` is the depth
/// defined by the metavar expression.
fn count(
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index 5ef20ff8a..9d886a1c9 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -28,13 +28,13 @@ use crate::{
tt_iter::TtIter,
};
-// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
pub use self::tt::{Delimiter, DelimiterKind, Punct};
pub use ::parser::TopEntryPoint;
pub use crate::{
syntax_bridge::{
- parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+ parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map,
+ syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree,
syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
SyntheticTokenId,
},
@@ -131,6 +131,7 @@ pub struct DeclarativeMacro {
// This is used for correctly determining the behavior of the pat fragment
// FIXME: This should be tracked by hygiene of the fragment identifier!
is_2021: bool,
+ err: Option<Box<ParseError>>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -206,81 +207,118 @@ impl Shift {
}
}
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Origin {
Def,
Call,
}
impl DeclarativeMacro {
+ pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro {
+ DeclarativeMacro {
+ rules: Box::default(),
+ shift: Shift(0),
+ is_2021,
+ err: Some(Box::new(err)),
+ }
+ }
+
/// The old, `macro_rules! m {}` flavor.
- pub fn parse_macro_rules(
- tt: &tt::Subtree,
- is_2021: bool,
- ) -> Result<DeclarativeMacro, ParseError> {
+ pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
// Note: this parsing can be implemented using mbe machinery itself, by
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
// manually seems easier.
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
+ let mut err = None;
+
while src.len() > 0 {
- let rule = Rule::parse(&mut src, true)?;
+ let rule = match Rule::parse(&mut src, true) {
+ Ok(it) => it,
+ Err(e) => {
+ err = Some(Box::new(e));
+ break;
+ }
+ };
rules.push(rule);
if let Err(()) = src.expect_char(';') {
if src.len() > 0 {
- return Err(ParseError::expected("expected `;`"));
+ err = Some(Box::new(ParseError::expected("expected `;`")));
}
break;
}
}
for Rule { lhs, .. } in &rules {
- validate(lhs)?;
+ if let Err(e) = validate(lhs) {
+ err = Some(Box::new(e));
+ break;
+ }
}
- Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 })
+ DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
}
/// The new, unstable `macro m {}` flavor.
- pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> Result<DeclarativeMacro, ParseError> {
+ pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro {
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
+ let mut err = None;
if tt::DelimiterKind::Brace == tt.delimiter.kind {
cov_mark::hit!(parse_macro_def_rules);
while src.len() > 0 {
- let rule = Rule::parse(&mut src, true)?;
+ let rule = match Rule::parse(&mut src, true) {
+ Ok(it) => it,
+ Err(e) => {
+ err = Some(Box::new(e));
+ break;
+ }
+ };
rules.push(rule);
if let Err(()) = src.expect_any_char(&[';', ',']) {
if src.len() > 0 {
- return Err(ParseError::expected("expected `;` or `,` to delimit rules"));
+ err = Some(Box::new(ParseError::expected(
+ "expected `;` or `,` to delimit rules",
+ )));
}
break;
}
}
} else {
cov_mark::hit!(parse_macro_def_simple);
- let rule = Rule::parse(&mut src, false)?;
- if src.len() != 0 {
- return Err(ParseError::expected("remaining tokens in macro def"));
+ match Rule::parse(&mut src, false) {
+ Ok(rule) => {
+ if src.len() != 0 {
+ err = Some(Box::new(ParseError::expected("remaining tokens in macro def")));
+ }
+ rules.push(rule);
+ }
+ Err(e) => {
+ err = Some(Box::new(e));
+ }
}
- rules.push(rule);
}
for Rule { lhs, .. } in &rules {
- validate(lhs)?;
+ if let Err(e) = validate(lhs) {
+ err = Some(Box::new(e));
+ break;
+ }
}
- Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 })
+ DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err }
}
- pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
- // apply shift
- let mut tt = tt.clone();
+ pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
self.shift.shift_all(&mut tt);
expander::expand_rules(&self.rules, &tt, self.is_2021)
}
+ pub fn err(&self) -> Option<&ParseError> {
+ self.err.as_deref()
+ }
+
pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
self.shift.shift(id)
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
index 8cbf0f8fc..62b2accf5 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -53,6 +53,37 @@ pub fn syntax_node_to_token_tree_with_modifications(
(subtree, c.id_alloc.map, c.id_alloc.next_id)
}
+/// Builds a `TokenMap` for the syntax node, mapping the allocated token ids to
+/// the node's text ranges, without constructing the `TokenTree` itself.
+pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap {
+ syntax_node_to_token_map_with_modifications(
+ node,
+ Default::default(),
+ 0,
+ Default::default(),
+ Default::default(),
+ )
+ .0
+}
+
+/// Like `syntax_node_to_token_map`, but starts from an existing `TokenMap` and
+/// token id, and applies the given replacement/append modifications.
+pub fn syntax_node_to_token_map_with_modifications(
+ node: &SyntaxNode,
+ existing_token_map: TokenMap,
+ next_id: u32,
+ replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+) -> (TokenMap, u32) {
+ let global_offset = node.text_range().start();
+ let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
+ collect_tokens(&mut c);
+ c.id_alloc.map.shrink_to_fit();
+ always!(c.replace.is_empty(), "replace: {:?}", c.replace);
+ always!(c.append.is_empty(), "append: {:?}", c.append);
+ (c.id_alloc.map, c.id_alloc.next_id)
+}
+
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct SyntheticTokenId(pub u32);
@@ -327,6 +358,111 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
}
}
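+/// Collects the source tokens into the converter's `TokenMap` (via its id
+/// allocator), mirroring `convert_tokens` but without building a `tt::Subtree`.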
+fn collect_tokens<C: TokenConverter>(conv: &mut C) {
+ struct StackEntry {
+ idx: usize,
+ open_range: TextRange,
+ delimiter: tt::DelimiterKind,
+ }
+
+ let entry = StackEntry {
+ delimiter: tt::DelimiterKind::Invisible,
+ // never used (delimiter is `Invisible`)
+ idx: !0,
+ open_range: TextRange::empty(TextSize::of('.')),
+ };
+ let mut stack = NonEmptyVec::new(entry);
+
+ loop {
+ let StackEntry { delimiter, .. } = stack.last_mut();
+ let (token, range) = match conv.bump() {
+ Some(it) => it,
+ None => break,
+ };
+ let synth_id = token.synthetic_id(conv);
+
+ let kind = token.kind(conv);
+ if kind == COMMENT {
+ // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
+ // figure out which token id to use for the doc comment, if it is converted successfully.
+ let next_id = conv.id_alloc().peek_next_id();
+ if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) {
+ let id = conv.id_alloc().alloc(range, synth_id);
+ debug_assert_eq!(id, next_id);
+ }
+ continue;
+ }
+ if kind.is_punct() && kind != UNDERSCORE {
+ if synth_id.is_none() {
+ assert_eq!(range.len(), TextSize::of('.'));
+ }
+
+ let expected = match delimiter {
+ tt::DelimiterKind::Parenthesis => Some(T![')']),
+ tt::DelimiterKind::Brace => Some(T!['}']),
+ tt::DelimiterKind::Bracket => Some(T![']']),
+ tt::DelimiterKind::Invisible => None,
+ };
+
+ if let Some(expected) = expected {
+ if kind == expected {
+ if let Some(entry) = stack.pop() {
+ conv.id_alloc().close_delim(entry.idx, Some(range));
+ }
+ continue;
+ }
+ }
+
+ let delim = match kind {
+ T!['('] => Some(tt::DelimiterKind::Parenthesis),
+ T!['{'] => Some(tt::DelimiterKind::Brace),
+ T!['['] => Some(tt::DelimiterKind::Bracket),
+ _ => None,
+ };
+
+ if let Some(kind) = delim {
+ let (_id, idx) = conv.id_alloc().open_delim(range, synth_id);
+
+ stack.push(StackEntry { idx, open_range: range, delimiter: kind });
+ continue;
+ }
+
+ conv.id_alloc().alloc(range, synth_id);
+ } else {
+ macro_rules! make_leaf {
+ ($i:ident) => {{
+ conv.id_alloc().alloc(range, synth_id);
+ }};
+ }
+ match kind {
+ T![true] | T![false] => make_leaf!(Ident),
+ IDENT => make_leaf!(Ident),
+ UNDERSCORE => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ LIFETIME_IDENT => {
+ let char_unit = TextSize::of('\'');
+ let r = TextRange::at(range.start(), char_unit);
+ conv.id_alloc().alloc(r, synth_id);
+
+ let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
+ conv.id_alloc().alloc(r, synth_id);
+ continue;
+ }
+ _ => continue,
+ };
+ };
+
+ // If we get here, we've consumed all input tokens.
+ // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
+ // Merge them so we're left with one.
+ while let Some(entry) = stack.pop() {
+ conv.id_alloc().close_delim(entry.idx, None);
+ conv.id_alloc().alloc(entry.open_range, None);
+ }
+ }
+}
+
fn is_single_token_op(kind: SyntaxKind) -> bool {
matches!(
kind,
@@ -509,12 +645,12 @@ trait TokenConverter: Sized {
fn id_alloc(&mut self) -> &mut TokenIdAlloc;
}
-impl<'a> SrcToken<RawConverter<'a>> for usize {
- fn kind(&self, ctx: &RawConverter<'a>) -> SyntaxKind {
+impl SrcToken<RawConverter<'_>> for usize {
+ fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
- fn to_char(&self, ctx: &RawConverter<'a>) -> Option<char> {
+ fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> {
ctx.lexed.text(*self).chars().next()
}
@@ -522,12 +658,12 @@ impl<'a> SrcToken<RawConverter<'a>> for usize {
ctx.lexed.text(*self).into()
}
- fn synthetic_id(&self, _ctx: &RawConverter<'a>) -> Option<SyntheticTokenId> {
+ fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option<SyntheticTokenId> {
None
}
}
-impl<'a> TokenConverter for RawConverter<'a> {
+impl TokenConverter for RawConverter<'_> {
type Token = usize;
fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> {
@@ -800,7 +936,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
Some(&texts[idx..texts.len() - (1 - idx)])
}
-impl<'a> TtTreeSink<'a> {
+impl TtTreeSink<'_> {
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.
fn float_split(&mut self, has_pseudo_dot: bool) {
diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
index c923e7a69..73a27df5d 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs
@@ -110,4 +110,15 @@ impl TokenMap {
// FIXME: This could be accidentally quadratic
self.entries.remove(idx);
}
+
+ pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
+ self.entries.iter().filter_map(|&(tid, tr)| match tr {
+ TokenTextRange::Token(range) => Some((tid, range)),
+ TokenTextRange::Delimiter(_) => None,
+ })
+ }
+
+ pub fn filter(&mut self, id: impl Fn(tt::TokenId) -> bool) {
+ self.entries.retain(|&(tid, _)| id(tid));
+ }
}
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
index 59dbf1568..79ff8ca28 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -197,4 +197,4 @@ impl<'a> Iterator for TtIter<'a> {
}
}
-impl<'a> std::iter::ExactSizeIterator for TtIter<'a> {}
+impl std::iter::ExactSizeIterator for TtIter<'_> {}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
index 1814e0e54..333318f53 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -165,6 +165,40 @@ pub(crate) mod entry {
}
m.complete(p, ERROR);
}
+
+ pub(crate) fn eager_macro_input(p: &mut Parser<'_>) {
+ let m = p.start();
+
+ let closing_paren_kind = match p.current() {
+ T!['{'] => T!['}'],
+ T!['('] => T![')'],
+ T!['['] => T![']'],
+ _ => {
+ p.error("expected `{`, `[`, `(`");
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ return;
+ }
+ };
+ p.bump_any();
+ while !p.at(EOF) && !p.at(closing_paren_kind) {
+ expressions::expr(p);
+ if !p.at(EOF) && !p.at(closing_paren_kind) {
+ p.expect(T![,]);
+ }
+ }
+ p.expect(closing_paren_kind);
+ if p.at(EOF) {
+ m.complete(p, MACRO_EAGER_INPUT);
+ return;
+ }
+ while !p.at(EOF) {
+ p.bump_any();
+ }
+ m.complete(p, ERROR);
+ }
}
}
@@ -211,70 +245,54 @@ impl BlockLike {
const VISIBILITY_FIRST: TokenSet = TokenSet::new(&[T![pub], T![crate]]);
fn opt_visibility(p: &mut Parser<'_>, in_tuple_field: bool) -> bool {
- match p.current() {
- T![pub] => {
- let m = p.start();
- p.bump(T![pub]);
- if p.at(T!['(']) {
- match p.nth(1) {
- // test crate_visibility
- // pub(crate) struct S;
- // pub(self) struct S;
- // pub(super) struct S;
-
- // test_err crate_visibility_empty_recover
- // pub() struct S;
-
- // test pub_parens_typepath
- // struct B(pub (super::A));
- // struct B(pub (crate::A,));
- T![crate] | T![self] | T![super] | T![ident] | T![')'] if p.nth(2) != T![:] => {
- // If we are in a tuple struct, then the parens following `pub`
- // might be an tuple field, not part of the visibility. So in that
- // case we don't want to consume an identifier.
-
- // test pub_tuple_field
- // struct MyStruct(pub (u32, u32));
- // struct MyStruct(pub (u32));
- // struct MyStruct(pub ());
- if !(in_tuple_field && matches!(p.nth(1), T![ident] | T![')'])) {
- p.bump(T!['(']);
- paths::use_path(p);
- p.expect(T![')']);
- }
- }
- // test crate_visibility_in
- // pub(in super::A) struct S;
- // pub(in crate) struct S;
- T![in] => {
- p.bump(T!['(']);
- p.bump(T![in]);
- paths::use_path(p);
- p.expect(T![')']);
- }
- _ => {}
+ if !p.at(T![pub]) {
+ return false;
+ }
+
+ let m = p.start();
+ p.bump(T![pub]);
+ if p.at(T!['(']) {
+ match p.nth(1) {
+ // test crate_visibility
+ // pub(crate) struct S;
+ // pub(self) struct S;
+ // pub(super) struct S;
+
+ // test_err crate_visibility_empty_recover
+ // pub() struct S;
+
+ // test pub_parens_typepath
+ // struct B(pub (super::A));
+ // struct B(pub (crate::A,));
+ T![crate] | T![self] | T![super] | T![ident] | T![')'] if p.nth(2) != T![:] => {
+ // If we are in a tuple struct, then the parens following `pub`
+ // might be a tuple field, not part of the visibility. So in that
+ // case we don't want to consume an identifier.
+
+ // test pub_tuple_field
+ // struct MyStruct(pub (u32, u32));
+ // struct MyStruct(pub (u32));
+ // struct MyStruct(pub ());
+ if !(in_tuple_field && matches!(p.nth(1), T![ident] | T![')'])) {
+ p.bump(T!['(']);
+ paths::use_path(p);
+ p.expect(T![')']);
}
}
- m.complete(p, VISIBILITY);
- true
- }
- // test crate_keyword_vis
- // crate fn main() { }
- // struct S { crate field: u32 }
- // struct T(crate u32);
- T![crate] => {
- if p.nth_at(1, T![::]) {
- // test crate_keyword_path
- // fn foo() { crate::foo(); }
- return false;
+ // test crate_visibility_in
+ // pub(in super::A) struct S;
+ // pub(in crate) struct S;
+ T![in] => {
+ p.bump(T!['(']);
+ p.bump(T![in]);
+ paths::use_path(p);
+ p.expect(T![')']);
}
- let m = p.start();
- p.bump(T![crate]);
- m.complete(p, VISIBILITY);
- true
+ _ => {}
}
- _ => false,
}
+ m.complete(p, VISIBILITY);
+ true
}
fn opt_rename(p: &mut Parser<'_>) {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
index e589b6993..211af98e6 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs
@@ -32,6 +32,9 @@ const GENERIC_ARG_FIRST: TokenSet = TokenSet::new(&[
])
.union(types::TYPE_FIRST);
+// Despite its name, this is also used for generic param lists.
+const GENERIC_ARG_RECOVERY_SET: TokenSet = TokenSet::new(&[T![>], T![,]]);
+
// test generic_arg
// type T = S<i32>;
fn generic_arg(p: &mut Parser<'_>) -> bool {
@@ -55,6 +58,15 @@ fn generic_arg(p: &mut Parser<'_>) -> bool {
// test assoc_type_eq
// type T = StreamingIterator<Item<'a> = &'a T>;
types::type_(p);
+ } else if p.at_ts(GENERIC_ARG_RECOVERY_SET) {
+ // Although `const_arg()` recovers as expected, we want to
+ // handle those here to give the following message because
+ // we don't know whether this associated item is a type or
+ // const at this point.
+
+ // test_err recover_from_missing_assoc_item_binding
+ // fn f() -> impl Iterator<Item = , Item = > {}
+ p.error("missing associated item binding");
} else {
// test assoc_const_eq
// fn foo<F: Foo<N=3>>() {}
@@ -141,12 +153,17 @@ pub(super) fn const_arg_expr(p: &mut Parser<'_>) {
expressions::literal(p);
lm.complete(p, PREFIX_EXPR);
}
- _ => {
+ _ if paths::is_use_path_start(p) => {
// This shouldn't be hit by `const_arg`
let lm = p.start();
paths::use_path(p);
lm.complete(p, PATH_EXPR);
}
+ _ => {
+ // test_err recover_from_missing_const_default
+ // struct A<const N: i32 = , const M: i32 =>;
+ p.err_recover("expected a generic const argument", GENERIC_ARG_RECOVERY_SET);
+ }
}
}
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
index 7fcf938ba..8ed1c84c4 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs
@@ -79,10 +79,9 @@ fn const_param(p: &mut Parser<'_>, m: Marker) {
p.error("missing type for const parameter");
}
- if p.at(T![=]) {
+ if p.eat(T![=]) {
// test const_param_default_literal
// struct A<const N: i32 = -1>;
- p.bump(T![=]);
// test const_param_default_expression
// struct A<const N: i32 = { 1 }>;
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
index 1c056819f..4e850b1f7 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
@@ -328,9 +328,6 @@ fn macro_rules(p: &mut Parser<'_>, m: Marker) {
p.bump_remap(T![macro_rules]);
p.expect(T![!]);
- if p.at(IDENT) {
- name(p);
- }
// Special-case `macro_rules! try`.
// This is a hack until we do proper edition support
@@ -340,6 +337,8 @@ fn macro_rules(p: &mut Parser<'_>, m: Marker) {
let m = p.start();
p.bump_remap(IDENT);
m.complete(p, NAME);
+ } else {
+ name(p);
}
match p.current() {
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
index 1aba1f767..c155e8aaf 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -75,6 +75,8 @@ pub enum TopEntryPoint {
/// Edge case -- macros generally don't expand to attributes, with the
/// exception of `cfg_attr` which does!
MetaItem,
+ /// Edge case 2 -- eager macros expand their input to a delimited list of comma-separated expressions
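+ /// (e.g. the `("foo", 0 + 1)` input of `concat!("foo", 0 + 1)`).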
+ MacroEagerInput,
}
impl TopEntryPoint {
@@ -87,6 +89,7 @@ impl TopEntryPoint {
TopEntryPoint::Type => grammar::entry::top::type_,
TopEntryPoint::Expr => grammar::entry::top::expr,
TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
+ TopEntryPoint::MacroEagerInput => grammar::entry::top::eager_macro_input,
};
let mut p = parser::Parser::new(input);
entry_point(&mut p);
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
index 5cdb39700..53cdad649 100644
--- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -24,7 +24,7 @@ pub enum StrStep<'a> {
Error { msg: &'a str, pos: usize },
}
-impl<'a> LexedStr<'a> {
+impl LexedStr<'_> {
pub fn to_input(&self) -> crate::Input {
let mut res = crate::Input::default();
let mut was_joint = false;
@@ -223,7 +223,8 @@ fn n_attached_trivias<'a>(
) -> usize {
match kind {
CONST | ENUM | FN | IMPL | MACRO_CALL | MACRO_DEF | MACRO_RULES | MODULE | RECORD_FIELD
- | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT => {
+ | STATIC | STRUCT | TRAIT | TUPLE_FIELD | TYPE_ALIAS | UNION | USE | VARIANT
+ | EXTERN_CRATE => {
let mut res = 0;
let mut trivias = trivias.enumerate().peekable();
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
index a8fbcfacf..48f407623 100644
--- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
@@ -262,6 +262,7 @@ pub enum SyntaxKind {
TYPE_BOUND_LIST,
MACRO_ITEMS,
MACRO_STMTS,
+ MACRO_EAGER_INPUT,
#[doc(hidden)]
__LAST,
}
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
index 11f9c34ab..2f3c7febc 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
@@ -6,7 +6,6 @@ fn vis() {
check(PrefixEntryPoint::Vis, "fn foo() {}", "");
check(PrefixEntryPoint::Vis, "pub(fn foo() {}", "pub");
check(PrefixEntryPoint::Vis, "pub(crate fn foo() {}", "pub(crate");
- check(PrefixEntryPoint::Vis, "crate fn foo() {}", "crate");
}
#[test]
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast
new file mode 100644
index 000000000..fc59db84e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast
@@ -0,0 +1,48 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "f"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ IMPL_TRAIT_TYPE
+ IMPL_KW "impl"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ COMMA ","
+ WHITESPACE " "
+ ASSOC_TYPE_ARG
+ NAME_REF
+ IDENT "Item"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 30: missing associated item binding
+error 39: missing associated item binding
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs
new file mode 100644
index 000000000..e484e433a
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs
@@ -0,0 +1 @@
+fn f() -> impl Iterator<Item = , Item = > {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast
new file mode 100644
index 000000000..809ad1b8d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast
@@ -0,0 +1,44 @@
+SOURCE_FILE
+ STRUCT
+ STRUCT_KW "struct"
+ WHITESPACE " "
+ NAME
+ IDENT "A"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "N"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ COMMA ","
+ WHITESPACE " "
+ CONST_PARAM
+ CONST_KW "const"
+ WHITESPACE " "
+ NAME
+ IDENT "M"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i32"
+ WHITESPACE " "
+ EQ "="
+ R_ANGLE ">"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+error 23: expected a generic const argument
+error 40: expected a generic const argument
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs
new file mode 100644
index 000000000..5bab13da9
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs
@@ -0,0 +1 @@
+struct A<const N: i32 = , const M: i32 =>;
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast
deleted file mode 100644
index 07b0210e4..000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rast
+++ /dev/null
@@ -1,63 +0,0 @@
-SOURCE_FILE
- FN
- VISIBILITY
- CRATE_KW "crate"
- WHITESPACE " "
- FN_KW "fn"
- WHITESPACE " "
- NAME
- IDENT "main"
- PARAM_LIST
- L_PAREN "("
- R_PAREN ")"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- WHITESPACE " "
- R_CURLY "}"
- WHITESPACE "\n"
- STRUCT
- STRUCT_KW "struct"
- WHITESPACE " "
- NAME
- IDENT "S"
- WHITESPACE " "
- RECORD_FIELD_LIST
- L_CURLY "{"
- WHITESPACE " "
- RECORD_FIELD
- VISIBILITY
- CRATE_KW "crate"
- WHITESPACE " "
- NAME
- IDENT "field"
- COLON ":"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "u32"
- WHITESPACE " "
- R_CURLY "}"
- WHITESPACE "\n"
- STRUCT
- STRUCT_KW "struct"
- WHITESPACE " "
- NAME
- IDENT "T"
- TUPLE_FIELD_LIST
- L_PAREN "("
- TUPLE_FIELD
- VISIBILITY
- CRATE_KW "crate"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "u32"
- R_PAREN ")"
- SEMICOLON ";"
- WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs
deleted file mode 100644
index e2b5f2161..000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-crate fn main() { }
-struct S { crate field: u32 }
-struct T(crate u32);
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast
deleted file mode 100644
index 8d9b61630..000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rast
+++ /dev/null
@@ -1,33 +0,0 @@
-SOURCE_FILE
- FN
- FN_KW "fn"
- WHITESPACE " "
- NAME
- IDENT "foo"
- PARAM_LIST
- L_PAREN "("
- R_PAREN ")"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- WHITESPACE " "
- EXPR_STMT
- CALL_EXPR
- PATH_EXPR
- PATH
- PATH
- PATH_SEGMENT
- NAME_REF
- CRATE_KW "crate"
- COLON2 "::"
- PATH_SEGMENT
- NAME_REF
- IDENT "foo"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
- SEMICOLON ";"
- WHITESPACE " "
- R_CURLY "}"
- WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs
deleted file mode 100644
index 0f454d121..000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0125_crate_keyword_path.rs
+++ /dev/null
@@ -1 +0,0 @@
-fn foo() { crate::foo(); }
diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs
index e0c20a414..88b8d0aee 100644
--- a/src/tools/rust-analyzer/crates/paths/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs
@@ -6,7 +6,7 @@
use std::{
borrow::Borrow,
ffi::OsStr,
- ops,
+ fmt, ops,
path::{Component, Path, PathBuf},
};
@@ -95,6 +95,12 @@ impl AbsPathBuf {
}
}
+impl fmt::Display for AbsPathBuf {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0.display(), f)
+ }
+}
+
/// Wrapper around an absolute [`Path`].
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
#[repr(transparent)]
@@ -217,6 +223,7 @@ impl AbsPath {
pub fn as_os_str(&self) -> &OsStr {
self.0.as_os_str()
}
+ #[deprecated(note = "use Display instead")]
pub fn display(&self) -> std::path::Display<'_> {
self.0.display()
}
@@ -227,6 +234,12 @@ impl AbsPath {
// endregion:delegate-methods
}
+impl fmt::Display for AbsPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.0.display(), f)
+ }
+}
+
/// Wrapper around a relative [`PathBuf`].
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct RelPathBuf(PathBuf);
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
index d3486e755..4229f2891 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -12,7 +12,7 @@ rust-version.workspace = true
doctest = false
[dependencies]
-object = { version = "0.30.2", default-features = false, features = [
+object = { version = "0.32.0", default-features = false, features = [
"std",
"read_core",
"elf",
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
index 13f67a012..48efbf589 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs
@@ -135,7 +135,12 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
}
};
- let mut snappy_decoder = SnapDecoder::new(snappy_portion);
+ let mut uncompressed: Box<dyn Read> = if &snappy_portion[0..4] == b"rust" {
+ // Not compressed.
+ Box::new(snappy_portion)
+ } else {
+ Box::new(SnapDecoder::new(snappy_portion))
+ };
// the bytes before version string bytes, so this basically is:
// 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
@@ -144,11 +149,11 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
// so 13 bytes in total, and we should check the 13th byte
// to know the length
let mut bytes_before_version = [0u8; 13];
- snappy_decoder.read_exact(&mut bytes_before_version)?;
+ uncompressed.read_exact(&mut bytes_before_version)?;
let length = bytes_before_version[12];
let mut version_string_utf8 = vec![0u8; length as usize];
- snappy_decoder.read_exact(&mut version_string_utf8)?;
+ uncompressed.read_exact(&mut version_string_utf8)?;
let version_string = String::from_utf8(version_string_utf8);
version_string.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}
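The change above lets `read_version` handle both compressed and uncompressed payloads: if the section already starts with the `b"rust"` magic bytes it is read as-is, otherwise it is routed through the snappy decoder. A rough sketch of that dispatch, assuming `SnapDecoder` is the snappy frame decoder aliased earlier in this file:

use std::io::Read;

use snap::read::FrameDecoder as SnapDecoder; // assumed: the alias used by version.rs

// Pick a reader based on a magic prefix; both branches are erased to Box<dyn Read>.
fn open_payload(payload: &[u8]) -> Box<dyn Read + '_> {
    if payload.len() >= 4 && &payload[0..4] == b"rust" {
        // Already starts with the `rust` header, so it is not snappy-compressed;
        // a &[u8] implements Read directly.
        Box::new(payload)
    } else {
        Box::new(SnapDecoder::new(payload))
    }
}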
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
index d5eb157bf..99993f16e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
@@ -12,14 +12,14 @@ rust-version.workspace = true
doctest = false
[dependencies]
-object = { version = "0.30.2", default-features = false, features = [
+object = { version = "0.32.0", default-features = false, features = [
"std",
"read_core",
"elf",
"macho",
"pe",
] }
-libloading = "0.7.3"
+libloading = "0.8.0"
memmap2 = "0.5.4"
stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
index 1980d4c78..fe18451d3 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server.rs
@@ -17,7 +17,10 @@ use token_stream::TokenStreamBuilder;
mod symbol;
pub use symbol::*;
-use std::ops::{Bound, Range};
+use std::{
+ iter,
+ ops::{Bound, Range},
+};
use crate::tt;
@@ -80,9 +83,7 @@ impl server::TokenStream for RustAnalyzer {
stream.is_empty()
}
fn from_str(&mut self, src: &str) -> Self::TokenStream {
- use std::str::FromStr;
-
- Self::TokenStream::from_str(src).expect("cannot parse string")
+ src.parse().expect("cannot parse string")
}
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
@@ -101,7 +102,7 @@ impl server::TokenStream for RustAnalyzer {
},
};
let tree = TokenTree::from(group);
- Self::TokenStream::from_iter(vec![tree])
+ Self::TokenStream::from_iter(iter::once(tree))
}
bridge::TokenTree::Ident(ident) => {
@@ -111,7 +112,7 @@ impl server::TokenStream for RustAnalyzer {
let ident: tt::Ident = tt::Ident { text, span: ident.span };
let leaf = tt::Leaf::from(ident);
let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(vec![tree])
+ Self::TokenStream::from_iter(iter::once(tree))
}
bridge::TokenTree::Literal(literal) => {
@@ -123,7 +124,7 @@ impl server::TokenStream for RustAnalyzer {
let literal = tt::Literal { text, span: literal.0.span };
let leaf = tt::Leaf::from(literal);
let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(vec![tree])
+ Self::TokenStream::from_iter(iter::once(tree))
}
bridge::TokenTree::Punct(p) => {
@@ -134,7 +135,7 @@ impl server::TokenStream for RustAnalyzer {
};
let leaf = tt::Leaf::from(punct);
let tree = TokenTree::from(leaf);
- Self::TokenStream::from_iter(vec![tree])
+ Self::TokenStream::from_iter(iter::once(tree))
}
}
}
@@ -355,12 +356,12 @@ impl server::Server for RustAnalyzer {
}
fn intern_symbol(ident: &str) -> Self::Symbol {
- // FIXME: should be self.interner once the proc-macro api allows is
+ // FIXME: should be `self.interner` once the proc-macro api allows it.
Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident))
}
fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
- // FIXME: should be self.interner once the proc-macro api allows is
+ // FIXME: should be `self.interner` once the proc-macro api allows it.
f(symbol.text(&SYMBOL_INTERNER).as_str())
}
}
diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml
index 602e74275..937834a82 100644
--- a/src/tools/rust-analyzer/crates/profile/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml
@@ -15,7 +15,7 @@ doctest = false
once_cell = "1.17.0"
cfg-if = "1.0.0"
libc = "0.2.135"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
countme = { version = "3.0.1", features = ["enable"] }
jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
diff --git a/src/tools/rust-analyzer/crates/profile/src/tree.rs b/src/tools/rust-analyzer/crates/profile/src/tree.rs
index 62f0c30b5..1290fba36 100644
--- a/src/tools/rust-analyzer/crates/profile/src/tree.rs
+++ b/src/tools/rust-analyzer/crates/profile/src/tree.rs
@@ -72,7 +72,7 @@ struct NodeIter<'a, T> {
next: Option<Idx<T>>,
}
-impl<'a, T> Iterator for NodeIter<'a, T> {
+impl<T> Iterator for NodeIter<'_, T> {
type Item = Idx<T>;
fn next(&mut self) -> Option<Idx<T>> {
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
index 3abff64a8..75977fc5b 100644
--- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
@@ -20,7 +20,7 @@ serde_json.workspace = true
serde.workspace = true
triomphe.workspace = true
anyhow = "1.0.62"
-la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+la-arena.workspace = true
itertools = "0.10.5"
# local deps
diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
index 6cbf403cb..fb0f3ab7d 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs
@@ -225,9 +225,8 @@ impl WorkspaceBuildScripts {
let package_build_data = &mut res[idx].outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
- "{}: {:?}",
- workspace[package].manifest.parent().display(),
- package_build_data,
+ "{}: {package_build_data:?}",
+ workspace[package].manifest.parent(),
);
}
}
@@ -270,9 +269,8 @@ impl WorkspaceBuildScripts {
let package_build_data = &outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
- "{}: {:?}",
- workspace[package].manifest.parent().display(),
- package_build_data,
+ "{}: {package_build_data:?}",
+ workspace[package].manifest.parent(),
);
}
}
@@ -424,7 +422,7 @@ impl WorkspaceBuildScripts {
let target_libdir = AbsPathBuf::try_from(PathBuf::from(target_libdir))
.map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?;
- tracing::info!("Loading rustc proc-macro paths from {}", target_libdir.display());
+ tracing::info!("Loading rustc proc-macro paths from {target_libdir}");
let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)?
.filter_map(|entry| {
@@ -458,9 +456,8 @@ impl WorkspaceBuildScripts {
let package_build_data = &bs.outputs[package];
if !package_build_data.is_unchanged() {
tracing::info!(
- "{}: {:?}",
- rustc[package].manifest.parent().display(),
- package_build_data,
+ "{}: {package_build_data:?}",
+ rustc[package].manifest.parent(),
);
}
}
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
index 92b454150..e47808a2c 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -4,7 +4,7 @@ use std::path::PathBuf;
use std::str::from_utf8;
use std::{ops, process::Command};
-use anyhow::{Context, Result};
+use anyhow::Context;
use base_db::Edition;
use cargo_metadata::{CargoOpt, MetadataCommand};
use la_arena::{Arena, Idx};
@@ -145,7 +145,7 @@ pub struct PackageDependency {
pub kind: DepKind,
}
-#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DepKind {
/// Available to the library, binary, and dev targets in the package (but not the build script).
Normal,
@@ -156,23 +156,20 @@ pub enum DepKind {
}
impl DepKind {
- fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> + '_ {
- let mut dep_kinds = Vec::new();
+ fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> {
+ let mut dep_kinds = [None; 3];
if list.is_empty() {
- dep_kinds.push(Self::Normal);
+ dep_kinds[0] = Some(Self::Normal);
}
for info in list {
- let kind = match info.kind {
- cargo_metadata::DependencyKind::Normal => Self::Normal,
- cargo_metadata::DependencyKind::Development => Self::Dev,
- cargo_metadata::DependencyKind::Build => Self::Build,
+ match info.kind {
+ cargo_metadata::DependencyKind::Normal => dep_kinds[0] = Some(Self::Normal),
+ cargo_metadata::DependencyKind::Development => dep_kinds[1] = Some(Self::Dev),
+ cargo_metadata::DependencyKind::Build => dep_kinds[2] = Some(Self::Build),
cargo_metadata::DependencyKind::Unknown => continue,
- };
- dep_kinds.push(kind);
+ }
}
- dep_kinds.sort_unstable();
- dep_kinds.dedup();
- dep_kinds.into_iter()
+ dep_kinds.into_iter().flatten()
}
}
@@ -236,7 +233,7 @@ impl CargoWorkspace {
current_dir: &AbsPath,
config: &CargoConfig,
progress: &dyn Fn(String),
- ) -> Result<cargo_metadata::Metadata> {
+ ) -> anyhow::Result<cargo_metadata::Metadata> {
let targets = find_list_of_build_targets(config, cargo_toml);
let mut meta = MetadataCommand::new();
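The rewritten `DepKind::iter` above drops the allocating `Vec` plus `sort_unstable`/`dedup`: each of the three possible kinds gets a fixed slot in a `[Option<DepKind>; 3]`, and `into_iter().flatten()` then yields the kinds that were seen, already deduplicated and in a stable order. A minimal sketch of the same slot-based dedup, with a hypothetical `Kind` enum in place of `DepKind`:

// Hypothetical stand-in for DepKind / cargo_metadata::DependencyKind.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Kind {
    Normal,
    Dev,
    Build,
}

fn dedup_kinds(seen: &[Kind]) -> impl Iterator<Item = Kind> {
    let mut slots = [None; 3];
    for &kind in seen {
        // Writing into a fixed slot deduplicates without sorting.
        match kind {
            Kind::Normal => slots[0] = Some(Kind::Normal),
            Kind::Dev => slots[1] = Some(Kind::Dev),
            Kind::Build => slots[2] = Some(Kind::Build),
        }
    }
    slots.into_iter().flatten()
}

fn main() {
    let kinds: Vec<_> = dedup_kinds(&[Kind::Dev, Kind::Normal, Kind::Dev]).collect();
    assert_eq!(kinds, [Kind::Normal, Kind::Dev]);
}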
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
index 61acc646f..901dcfd2b 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -31,12 +31,13 @@ pub mod target_data_layout;
mod tests;
use std::{
+ fmt,
fs::{self, read_dir, ReadDir},
io,
process::Command,
};
-use anyhow::{bail, format_err, Context, Result};
+use anyhow::{bail, format_err, Context};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashSet;
@@ -59,19 +60,19 @@ pub enum ProjectManifest {
}
impl ProjectManifest {
- pub fn from_manifest_file(path: AbsPathBuf) -> Result<ProjectManifest> {
+ pub fn from_manifest_file(path: AbsPathBuf) -> anyhow::Result<ProjectManifest> {
let path = ManifestPath::try_from(path)
- .map_err(|path| format_err!("bad manifest path: {}", path.display()))?;
+ .map_err(|path| format_err!("bad manifest path: {path}"))?;
if path.file_name().unwrap_or_default() == "rust-project.json" {
return Ok(ProjectManifest::ProjectJson(path));
}
if path.file_name().unwrap_or_default() == "Cargo.toml" {
return Ok(ProjectManifest::CargoToml(path));
}
- bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display());
+ bail!("project root must point to Cargo.toml or rust-project.json: {path}");
}
- pub fn discover_single(path: &AbsPath) -> Result<ProjectManifest> {
+ pub fn discover_single(path: &AbsPath) -> anyhow::Result<ProjectManifest> {
let mut candidates = ProjectManifest::discover(path)?;
let res = match candidates.pop() {
None => bail!("no projects"),
@@ -145,7 +146,17 @@ impl ProjectManifest {
}
}
-fn utf8_stdout(mut cmd: Command) -> Result<String> {
+impl fmt::Display for ProjectManifest {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ProjectManifest::ProjectJson(it) | ProjectManifest::CargoToml(it) => {
+ fmt::Display::fmt(&it, f)
+ }
+ }
+ }
+}
+
+fn utf8_stdout(mut cmd: Command) -> anyhow::Result<String> {
let output = cmd.output().with_context(|| format!("{cmd:?} failed"))?;
if !output.status.success() {
match String::from_utf8(output.stderr) {
diff --git a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
index 3f60e4dd9..490e1a4ea 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs
@@ -1,5 +1,5 @@
//! See [`ManifestPath`].
-use std::{ops, path::Path};
+use std::{fmt, ops, path::Path};
use paths::{AbsPath, AbsPathBuf};
@@ -40,6 +40,12 @@ impl ManifestPath {
}
}
+impl fmt::Display for ManifestPath {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&self.file, f)
+ }
+}
+
impl ops::Deref for ManifestPath {
type Target = AbsPath;
diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
index 0066f6717..8392718b2 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs
@@ -2,7 +2,6 @@
use std::process::Command;
-use anyhow::Result;
use rustc_hash::FxHashMap;
use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath};
@@ -23,6 +22,9 @@ pub(crate) fn get(
}
}
+ // Add miri cfg, which is useful for mir eval in stdlib
+ res.push(CfgFlag::Atom("miri".into()));
+
match get_rust_cfgs(cargo_toml, target, extra_env) {
Ok(rustc_cfgs) => {
tracing::debug!(
@@ -44,7 +46,7 @@ fn get_rust_cfgs(
cargo_toml: Option<&ManifestPath>,
target: Option<&str>,
extra_env: &FxHashMap<String, String>,
-) -> Result<String> {
+) -> anyhow::Result<String> {
if let Some(cargo_toml) = cargo_toml {
let mut cargo_config = Command::new(toolchain::cargo());
cargo_config.envs(extra_env);
diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
index e3a2de927..da862c9e8 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
@@ -85,9 +85,8 @@ impl Sysroot {
" try running `rustup component add rust-src` to possible fix this"
};
Some(format!(
- "could not find libcore in loaded sysroot at `{}`{}",
- self.src_root.as_path().display(),
- var_note,
+ "could not find libcore in loaded sysroot at `{}`{var_note}",
+ self.src_root.as_path(),
))
} else {
None
@@ -99,7 +98,7 @@ impl Sysroot {
impl Sysroot {
/// Attempts to discover the toolchain's sysroot from the given `dir`.
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> {
- tracing::debug!("discovering sysroot for {}", dir.display());
+ tracing::debug!("discovering sysroot for {dir}");
let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
let sysroot_src_dir =
discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?;
@@ -111,7 +110,7 @@ impl Sysroot {
extra_env: &FxHashMap<String, String>,
src: AbsPathBuf,
) -> Result<Sysroot> {
- tracing::debug!("discovering sysroot for {}", current_dir.display());
+ tracing::debug!("discovering sysroot for {current_dir}");
let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?;
Ok(Sysroot::load(sysroot_dir, src))
}
@@ -122,7 +121,7 @@ impl Sysroot {
pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> {
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
- format_err!("can't load standard library from sysroot path {}", sysroot_dir.display())
+ format_err!("can't load standard library from sysroot path {sysroot_dir}")
})?;
Ok(Sysroot::load(sysroot_dir, sysroot_src_dir))
}
@@ -220,10 +219,10 @@ fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
let core = path.join("core");
if fs::metadata(&core).is_ok() {
- tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
+ tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {path}");
return Some(path);
}
- tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
+ tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {core:?}), ignoring");
} else {
tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring");
}
@@ -250,10 +249,9 @@ fn discover_sysroot_src_dir_or_add_component(
format_err!(
"\
can't load standard library from sysroot
-{}
+{sysroot_path}
(discovered via `rustc --print sysroot`)
try installing the Rust source the same way you installed rustc",
- sysroot_path.display(),
)
})
}
@@ -261,7 +259,7 @@ try installing the Rust source the same way you installed rustc",
fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml");
let rustc_src = ManifestPath::try_from(rustc_src).ok()?;
- tracing::debug!("checking for rustc source code: {}", rustc_src.display());
+ tracing::debug!("checking for rustc source code: {rustc_src}");
if fs::metadata(&rustc_src).is_ok() {
Some(rustc_src)
} else {
@@ -271,7 +269,7 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
let rust_src = sysroot_path.join("lib/rustlib/src/rust/library");
- tracing::debug!("checking sysroot library: {}", rust_src.display());
+ tracing::debug!("checking sysroot library: {rust_src}");
if fs::metadata(&rust_src).is_ok() {
Some(rust_src)
} else {
diff --git a/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs b/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
index 30ca7b348..cb995857e 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
@@ -1,7 +1,6 @@
//! Runs `rustc --print target-spec-json` to get the target_data_layout.
use std::process::Command;
-use anyhow::Result;
use rustc_hash::FxHashMap;
use crate::{utf8_stdout, ManifestPath};
@@ -10,7 +9,7 @@ pub fn get(
cargo_toml: Option<&ManifestPath>,
target: Option<&str>,
extra_env: &FxHashMap<String, String>,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let output = (|| {
if let Some(cargo_toml) = cargo_toml {
let mut cmd = Command::new(toolchain::rustc());
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index b5fe237fc..f51ea7eeb 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -4,7 +4,7 @@
use std::{collections::VecDeque, fmt, fs, process::Command, sync};
-use anyhow::{format_err, Context, Result};
+use anyhow::{format_err, Context};
use base_db::{
CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult,
@@ -151,7 +151,16 @@ impl ProjectWorkspace {
manifest: ProjectManifest,
config: &CargoConfig,
progress: &dyn Fn(String),
- ) -> Result<ProjectWorkspace> {
+ ) -> anyhow::Result<ProjectWorkspace> {
+ ProjectWorkspace::load_inner(&manifest, config, progress)
+ .with_context(|| format!("Failed to load the project at {manifest}"))
+ }
+
+ fn load_inner(
+ manifest: &ProjectManifest,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> anyhow::Result<ProjectWorkspace> {
let version = |current_dir, cmd_path, prefix: &str| {
let cargo_version = utf8_stdout({
let mut cmd = Command::new(cmd_path);
@@ -167,12 +176,10 @@ impl ProjectWorkspace {
};
let res = match manifest {
ProjectManifest::ProjectJson(project_json) => {
- let file = fs::read_to_string(&project_json).with_context(|| {
- format!("Failed to read json file {}", project_json.display())
- })?;
- let data = serde_json::from_str(&file).with_context(|| {
- format!("Failed to deserialize json file {}", project_json.display())
- })?;
+ let file = fs::read_to_string(&project_json)
+ .with_context(|| format!("Failed to read json file {project_json}"))?;
+ let data = serde_json::from_str(&file)
+ .with_context(|| format!("Failed to deserialize json file {project_json}"))?;
let project_location = project_json.parent().to_path_buf();
let toolchain = version(&*project_location, toolchain::rustc(), "rustc ")?;
let project_json = ProjectJson::new(&project_location, data);
@@ -193,9 +200,7 @@ impl ProjectWorkspace {
)
.with_context(|| {
format!(
- "Failed to read Cargo metadata from Cargo.toml file {}, {:?}",
- cargo_toml.display(),
- toolchain
+ "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}",
)
})?;
let cargo = CargoWorkspace::new(meta);
@@ -203,12 +208,12 @@ impl ProjectWorkspace {
let sysroot = match (&config.sysroot, &config.sysroot_src) {
(Some(RustLibSource::Path(path)), None) => {
Sysroot::with_sysroot_dir(path.clone()).map_err(|e| {
- Some(format!("Failed to find sysroot at {}:{e}", path.display()))
+ Some(format!("Failed to find sysroot at {path}:{e}"))
})
}
(Some(RustLibSource::Discover), None) => {
Sysroot::discover(cargo_toml.parent(), &config.extra_env).map_err(|e| {
- Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display()))
+ Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}"))
})
}
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
@@ -220,21 +225,19 @@ impl ProjectWorkspace {
&config.extra_env,
sysroot_src.clone(),
).map_err(|e| {
- Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display()))
+ Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}"))
})
}
(None, _) => Err(None),
};
if let Ok(sysroot) = &sysroot {
- tracing::info!(workspace = %cargo_toml.display(), src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ tracing::info!(workspace = %cargo_toml, src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
}
let rustc_dir = match &config.rustc_source {
Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
- .map_err(|p| {
- Some(format!("rustc source path is not absolute: {}", p.display()))
- }),
+ .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
Some(RustLibSource::Discover) => {
sysroot.as_ref().ok().and_then(Sysroot::discover_rustc).ok_or_else(|| {
Some(format!("Failed to discover rustc source for sysroot."))
@@ -244,7 +247,7 @@ impl ProjectWorkspace {
};
let rustc = rustc_dir.and_then(|rustc_dir| {
- tracing::info!(workspace = %cargo_toml.display(), rustc_dir = %rustc_dir.display(), "Using rustc source");
+ tracing::info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
match CargoWorkspace::fetch_metadata(
&rustc_dir,
cargo_toml.parent(),
@@ -266,13 +269,11 @@ impl ProjectWorkspace {
Err(e) => {
tracing::error!(
%e,
- "Failed to read Cargo metadata from rustc source at {}",
- rustc_dir.display()
+ "Failed to read Cargo metadata from rustc source at {rustc_dir}",
);
Err(Some(format!(
- "Failed to read Cargo metadata from rustc source at {}: {e}",
- rustc_dir.display())
- ))
+ "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
+ )))
}
}
});
@@ -330,7 +331,7 @@ impl ProjectWorkspace {
(None, None) => Err(None),
};
if let Ok(sysroot) = &sysroot {
- tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
}
let rustc_cfg = rustc_cfg::get(None, target, extra_env);
@@ -340,26 +341,23 @@ impl ProjectWorkspace {
pub fn load_detached_files(
detached_files: Vec<AbsPathBuf>,
config: &CargoConfig,
- ) -> Result<ProjectWorkspace> {
+ ) -> anyhow::Result<ProjectWorkspace> {
let sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => Sysroot::with_sysroot_dir(path.clone())
- .map_err(|e| Some(format!("Failed to find sysroot at {}:{e}", path.display()))),
+ .map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}"))),
Some(RustLibSource::Discover) => {
let dir = &detached_files
.first()
.and_then(|it| it.parent())
.ok_or_else(|| format_err!("No detached files to load"))?;
Sysroot::discover(dir, &config.extra_env).map_err(|e| {
- Some(format!(
- "Failed to find sysroot for {}. Is rust-src installed? {e}",
- dir.display()
- ))
+ Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? {e}"))
})
}
None => Err(None),
};
if let Ok(sysroot) = &sysroot {
- tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot");
}
let rustc_cfg = rustc_cfg::get(None, None, &Default::default());
Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
@@ -370,15 +368,12 @@ impl ProjectWorkspace {
&self,
config: &CargoConfig,
progress: &dyn Fn(String),
- ) -> Result<WorkspaceBuildScripts> {
+ ) -> anyhow::Result<WorkspaceBuildScripts> {
match self {
ProjectWorkspace::Cargo { cargo, toolchain, .. } => {
WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain)
.with_context(|| {
- format!(
- "Failed to run build scripts for {}",
- &cargo.workspace_root().display()
- )
+ format!("Failed to run build scripts for {}", cargo.workspace_root())
})
}
ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
@@ -393,7 +388,7 @@ impl ProjectWorkspace {
workspaces: &[ProjectWorkspace],
config: &CargoConfig,
progress: &dyn Fn(String),
- ) -> Vec<Result<WorkspaceBuildScripts>> {
+ ) -> Vec<anyhow::Result<WorkspaceBuildScripts>> {
if matches!(config.invocation_strategy, InvocationStrategy::PerWorkspace)
|| config.run_build_script_command.is_none()
{
@@ -419,10 +414,7 @@ impl ProjectWorkspace {
ProjectWorkspace::Cargo { cargo, .. } => match outputs {
Ok(outputs) => Ok(outputs.next().unwrap()),
Err(e) => Err(e.clone()).with_context(|| {
- format!(
- "Failed to run build scripts for {}",
- &cargo.workspace_root().display()
- )
+ format!("Failed to run build scripts for {}", cargo.workspace_root())
}),
},
_ => Ok(WorkspaceBuildScripts::default()),
@@ -447,7 +439,7 @@ impl ProjectWorkspace {
}
}
- pub fn find_sysroot_proc_macro_srv(&self) -> Result<AbsPathBuf> {
+ pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
match self {
ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. }
| ProjectWorkspace::Json { sysroot: Ok(sysroot), .. }
@@ -459,22 +451,22 @@ impl ProjectWorkspace {
.map(|segment| sysroot.root().join(segment).join(&standalone_server_name))
.find(|server_path| std::fs::metadata(server_path).is_ok())
.ok_or_else(|| {
- anyhow::anyhow!(
+ anyhow::format_err!(
"cannot find proc-macro server in sysroot `{}`",
- sysroot.root().display()
+ sysroot.root()
)
})
}
ProjectWorkspace::DetachedFiles { .. } => {
- Err(anyhow::anyhow!("cannot find proc-macro server, no sysroot was found"))
+ Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found"))
}
- ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::anyhow!(
+ ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::format_err!(
"cannot find proc-macro-srv, the workspace `{}` is missing a sysroot",
- cargo.workspace_root().display()
+ cargo.workspace_root()
)),
- ProjectWorkspace::Json { project, .. } => Err(anyhow::anyhow!(
+ ProjectWorkspace::Json { project, .. } => Err(anyhow::format_err!(
"cannot find proc-macro-srv, the workspace `{}` is missing a sysroot",
- project.path().display()
+ project.path()
)),
}
}
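Splitting `load` into a thin wrapper plus `load_inner` lets a single `with_context` call attach the failing manifest path to every error produced by the inner function. A small sketch of that wrapper shape, using a hypothetical file-based loader instead of the real workspace loading:

use anyhow::Context;

// Hypothetical stand-in for ProjectWorkspace::load / load_inner.
fn load(manifest: &str) -> anyhow::Result<String> {
    load_inner(manifest)
        .with_context(|| format!("Failed to load the project at {manifest}"))
}

fn load_inner(manifest: &str) -> anyhow::Result<String> {
    let text = std::fs::read_to_string(manifest)
        .with_context(|| format!("Failed to read json file {manifest}"))?;
    Ok(text)
}

fn main() {
    if let Err(err) = load("rust-project.json") {
        // Prints the outer context first, then the chained causes.
        eprintln!("{err:#}");
    }
}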
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
index 5b72d5756..5bfac7ee4 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
@@ -34,10 +34,9 @@ serde.workspace = true
rayon = "1.6.1"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }
-lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" }
+lsp-server.workspace = true
tracing = "0.1.35"
tracing-subscriber = { version = "0.3.16", default-features = false, features = [
- "env-filter",
"registry",
"fmt",
"tracing-log",
@@ -48,12 +47,8 @@ triomphe.workspace = true
nohash-hasher.workspace = true
always-assert = "0.1.2"
-# These dependencies are unused, but we pin them to a version here to restrict them for our transitive dependencies
-# so that we don't pull in duplicates of their dependencies like windows-sys and syn 1 vs 2
-# these would pull in serde 2
-thiserror = "=1.0.39"
-serde_repr = "=0.1.11"
-# these would pull in windows-sys 0.45.0
+# These 3 deps are not used by r-a directly, but we list them here to lock in their versions
+# in our transitive deps to prevent them from pulling in windows-sys 0.45.0
mio = "=0.8.5"
filetime = "=0.2.19"
parking_lot_core = "=0.9.6"
@@ -67,13 +62,13 @@ ide-db.workspace = true
# This should only be used in CLI
ide-ssr.workspace = true
ide.workspace = true
+load-cargo.workspace = true
proc-macro-api.workspace = true
profile.workspace = true
project-model.workspace = true
stdx.workspace = true
syntax.workspace = true
toolchain.workspace = true
-tt.workspace = true
vfs-notify.workspace = true
vfs.workspace = true
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
index 8caadecd8..1f923f6cf 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs
@@ -8,10 +8,11 @@ use std::{
sync::Arc,
};
-use rust_analyzer::Result;
+use anyhow::Context;
use tracing::{level_filters::LevelFilter, Event, Subscriber};
use tracing_log::NormalizeEvent;
use tracing_subscriber::{
+ filter::Targets,
fmt::{
format::Writer, writer::BoxMakeWriter, FmtContext, FormatEvent, FormatFields,
FormattedFields, MakeWriter,
@@ -19,81 +20,62 @@ use tracing_subscriber::{
layer::SubscriberExt,
registry::LookupSpan,
util::SubscriberInitExt,
- EnvFilter, Registry,
+ Registry,
};
use tracing_tree::HierarchicalLayer;
-pub(crate) struct Logger {
- filter: EnvFilter,
- file: Option<File>,
+pub(crate) struct LoggerConfig {
+ pub(crate) log_file: Option<File>,
+ pub(crate) filter: String,
+ pub(crate) chalk_filter: Option<String>,
}
struct MakeWriterStderr;
-impl<'a> MakeWriter<'a> for MakeWriterStderr {
+impl MakeWriter<'_> for MakeWriterStderr {
type Writer = Stderr;
- fn make_writer(&'a self) -> Self::Writer {
+ fn make_writer(&self) -> Self::Writer {
io::stderr()
}
}
-impl Logger {
- pub(crate) fn new(file: Option<File>, filter: Option<&str>) -> Logger {
- let filter = filter.map_or(EnvFilter::default(), EnvFilter::new);
-
- Logger { filter, file }
- }
+impl LoggerConfig {
+ pub(crate) fn init(self) -> anyhow::Result<()> {
+ let mut filter: Targets = self
+ .filter
+ .parse()
+ .with_context(|| format!("invalid log filter: `{}`", self.filter))?;
+
+ let mut chalk_layer = None;
+ if let Some(chalk_filter) = self.chalk_filter {
+ let level: LevelFilter =
+ chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
+ chalk_layer = Some(
+ HierarchicalLayer::default()
+ .with_indent_lines(true)
+ .with_ansi(false)
+ .with_indent_amount(2)
+ .with_writer(io::stderr),
+ );
+ filter = filter
+ .with_target("chalk_solve", level)
+ .with_target("chalk_ir", level)
+ .with_target("chalk_recursive", level);
+ };
- pub(crate) fn install(self) -> Result<()> {
- // The meaning of CHALK_DEBUG I suspected is to tell chalk crates
- // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing
- // logs. But now we can only have just one filter, which means we have to
- // merge chalk filter to our main filter (from RA_LOG env).
- //
- // The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`.
- // As the value should only affect chalk crates, we'd better manually
- // specify the target. And for simplicity, CHALK_DEBUG only accept the value
- // that specify level.
- let chalk_level_dir = std::env::var("CHALK_DEBUG")
- .map(|val| {
- val.parse::<LevelFilter>().expect(
- "invalid CHALK_DEBUG value, expect right log level (like debug or trace)",
- )
- })
- .ok();
-
- let chalk_layer = HierarchicalLayer::default()
- .with_indent_lines(true)
- .with_ansi(false)
- .with_indent_amount(2)
- .with_writer(io::stderr);
-
- let writer = match self.file {
+ let writer = match self.log_file {
Some(file) => BoxMakeWriter::new(Arc::new(file)),
None => BoxMakeWriter::new(io::stderr),
};
let ra_fmt_layer =
tracing_subscriber::fmt::layer().event_format(LoggerFormatter).with_writer(writer);
- match chalk_level_dir {
- Some(val) => {
- Registry::default()
- .with(
- self.filter
- .add_directive(format!("chalk_solve={val}").parse()?)
- .add_directive(format!("chalk_ir={val}").parse()?)
- .add_directive(format!("chalk_recursive={val}").parse()?),
- )
- .with(ra_fmt_layer)
- .with(chalk_layer)
- .init();
- }
- None => {
- Registry::default().with(self.filter).with(ra_fmt_layer).init();
- }
- };
-
+ let registry = Registry::default().with(filter).with(ra_fmt_layer);
+ match chalk_layer {
+ Some(chalk_layer) => registry.with(chalk_layer).init(),
+ None => registry.init(),
+ }
Ok(())
}
}
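The new `LoggerConfig::init` builds the subscriber from a `tracing_subscriber::filter::Targets` parsed out of the `RA_LOG`-style string, instead of an `EnvFilter`, and only stacks the chalk `HierarchicalLayer` when `CHALK_DEBUG` asks for it. A condensed sketch of that registry wiring (without the chalk layer), assuming a plain stderr writer:

use anyhow::Context;
use tracing_subscriber::{filter::Targets, layer::SubscriberExt, util::SubscriberInitExt, Registry};

// Hypothetical, reduced version of LoggerConfig::init: one Targets filter plus one fmt layer.
fn init_logging(filter: &str) -> anyhow::Result<()> {
    let filter: Targets = filter
        .parse()
        .with_context(|| format!("invalid log filter: `{filter}`"))?;
    let fmt_layer = tracing_subscriber::fmt::layer().with_writer(std::io::stderr);
    Registry::default().with(filter).with(fmt_layer).init();
    Ok(())
}

fn main() -> anyhow::Result<()> {
    // Same shape as RA_LOG, e.g. "error,rust_analyzer=info".
    init_logging("error")?;
    tracing::error!("logging is up");
    Ok(())
}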
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
index 91911dd18..2fa14fc7e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -7,14 +7,11 @@
mod logger;
mod rustc_wrapper;
-use std::{
- env, fs,
- path::{Path, PathBuf},
- process,
-};
+use std::{env, fs, path::PathBuf, process};
+use anyhow::Context;
use lsp_server::Connection;
-use rust_analyzer::{cli::flags, config::Config, from_json, Result};
+use rust_analyzer::{cli::flags, config::Config, from_json};
use vfs::AbsPathBuf;
#[cfg(all(feature = "mimalloc"))]
@@ -25,7 +22,7 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
-fn main() {
+fn main() -> anyhow::Result<()> {
if std::env::var("RA_RUSTC_WRAPPER").is_ok() {
let mut args = std::env::args_os();
let _me = args.next().unwrap();
@@ -41,14 +38,7 @@ fn main() {
}
let flags = flags::RustAnalyzer::from_env_or_exit();
- if let Err(err) = try_main(flags) {
- tracing::error!("Unexpected error: {}", err);
- eprintln!("{err}");
- process::exit(101);
- }
-}
-fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
#[cfg(debug_assertions)]
if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
#[allow(unused_mut)]
@@ -58,14 +48,8 @@ fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
}
}
- let mut log_file = flags.log_file.as_deref();
-
- let env_log_file = env::var("RA_LOG_FILE").ok();
- if let Some(env_log_file) = env_log_file.as_deref() {
- log_file = Some(Path::new(env_log_file));
- }
+ setup_logging(flags.log_file.clone())?;
- setup_logging(log_file)?;
let verbosity = flags.verbosity();
match flags.subcommand {
@@ -98,11 +82,12 @@ fn try_main(flags: flags::RustAnalyzer) -> Result<()> {
flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
+ flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?,
}
Ok(())
}
-fn setup_logging(log_file: Option<&Path>) -> Result<()> {
+fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
if cfg!(windows) {
// This is required so that windows finds our pdb that is placed right beside the exe.
// By default it doesn't look at the folder the exe resides in, only in the current working
@@ -115,23 +100,42 @@ fn setup_logging(log_file: Option<&Path>) -> Result<()> {
}
}
}
+
if env::var("RUST_BACKTRACE").is_err() {
env::set_var("RUST_BACKTRACE", "short");
}
+ let log_file = env::var("RA_LOG_FILE").ok().map(PathBuf::from).or(log_file_flag);
let log_file = match log_file {
Some(path) => {
if let Some(parent) = path.parent() {
let _ = fs::create_dir_all(parent);
}
- Some(fs::File::create(path)?)
+ Some(
+ fs::File::create(&path)
+ .with_context(|| format!("can't create log file at {}", path.display()))?,
+ )
}
None => None,
};
- let filter = env::var("RA_LOG").ok();
- // deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually useful
- // information in there for debugging
- logger::Logger::new(log_file, filter.as_deref().or(Some("error"))).install()?;
+
+ logger::LoggerConfig {
+ log_file,
+ // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually
+ // useful information in there for debugging.
+ filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()),
+ // CHALK_DEBUG is presumably meant to tell the chalk crates
+ // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing
+ // logs. But we can only have one filter now, which means we have to
+ // merge the chalk filter into our main filter (from the RA_LOG env var).
+ //
+ // The accepted syntax of CHALK_DEBUG is `target[span{field=value}]=level`.
+ // As the value should only affect the chalk crates, we'd better manually
+ // specify the target. And for simplicity, CHALK_DEBUG only accepts a value
+ // that specifies the level.
+ chalk_filter: env::var("CHALK_DEBUG").ok(),
+ }
+ .init()?;
profile::init();
@@ -146,8 +150,8 @@ const STACK_SIZE: usize = 1024 * 1024 * 8;
fn with_extra_thread(
thread_name: impl Into<String>,
thread_intent: stdx::thread::ThreadIntent,
- f: impl FnOnce() -> Result<()> + Send + 'static,
-) -> Result<()> {
+ f: impl FnOnce() -> anyhow::Result<()> + Send + 'static,
+) -> anyhow::Result<()> {
let handle = stdx::thread::Builder::new(thread_intent)
.name(thread_name.into())
.stack_size(STACK_SIZE)
@@ -158,7 +162,7 @@ fn with_extra_thread(
Ok(())
}
-fn run_server() -> Result<()> {
+fn run_server() -> anyhow::Result<()> {
tracing::info!("server version {} will start", rust_analyzer::version());
let (connection, io_threads) = Connection::stdio();
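With the env handling folded into `setup_logging`, the log file path is now resolved in one expression: `RA_LOG_FILE` wins when set, otherwise the `--log-file` flag is used, otherwise logging stays on stderr. A tiny sketch of that precedence rule:

use std::{env, path::PathBuf};

// RA_LOG_FILE (if set) takes precedence over the CLI flag; None means "log to stderr".
fn resolve_log_file(flag: Option<PathBuf>) -> Option<PathBuf> {
    env::var("RA_LOG_FILE").ok().map(PathBuf::from).or(flag)
}

fn main() {
    let from_flag = Some(PathBuf::from("ra.log"));
    println!("{:?}", resolve_log_file(from_flag));
}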
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
index e35201921..64646b33a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
@@ -1,7 +1,6 @@
//! Various batch processing tasks, intended primarily for debugging.
pub mod flags;
-pub mod load_cargo;
mod parse;
mod symbols;
mod highlight;
@@ -10,13 +9,17 @@ mod diagnostics;
mod ssr;
mod lsif;
mod scip;
+mod run_tests;
mod progress_report;
use std::io::Read;
use anyhow::Result;
+use hir::{Module, Name};
+use hir_ty::db::HirDatabase;
use ide::AnalysisHost;
+use itertools::Itertools;
use vfs::Vfs;
#[derive(Clone, Copy)]
@@ -36,7 +39,7 @@ impl Verbosity {
}
}
-fn read_stdin() -> Result<String> {
+fn read_stdin() -> anyhow::Result<String> {
let mut buff = String::new();
std::io::stdin().read_to_string(&mut buff)?;
Ok(buff)
@@ -71,3 +74,14 @@ fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
eprintln!("{remaining:>8} Remaining");
}
+
+fn full_name_of_item(db: &dyn HirDatabase, module: Module, name: Name) -> String {
+ module
+ .path_to_root(db)
+ .into_iter()
+ .rev()
+ .filter_map(|it| it.name(db))
+ .chain(Some(name))
+ .map(|it| it.display(db.upcast()).to_string())
+ .join("::")
+}
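The new `full_name_of_item` helper (shared by the data-layout and const-eval reports in `analysis_stats`) turns a module chain plus an item name into a `::`-separated path: `path_to_root` runs from the item's module up to the crate root, so it is reversed before joining. A string-only sketch of that joining logic, assuming `itertools` for `join`:

use itertools::Itertools;

// path_to_root is ordered from the item's module up to the crate root,
// mirroring hir::Module::path_to_root.
fn full_name(path_to_root: &[&str], item: &str) -> String {
    path_to_root.iter().rev().copied().chain(Some(item)).join("::")
}

fn main() {
    // For an item `Baz` in module `foo::bar`, path_to_root is ["bar", "foo"].
    assert_eq!(full_name(&["bar", "foo"], "Baz"), "foo::bar::Baz");
}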
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index 4cb917ce2..f446a7c05 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -8,14 +8,14 @@ use std::{
use hir::{
db::{DefDatabase, ExpandDatabase, HirDatabase},
- Adt, AssocItem, Crate, DefWithBody, HasCrate, HasSource, HirDisplay, ModuleDef, Name,
+ Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, ModuleDef, Name,
};
use hir_def::{
body::{BodySourceMap, SyntheticSyntax},
hir::{ExprId, PatId},
};
use hir_ty::{Interner, Substitution, TyExt, TypeFlags};
-use ide::{LineCol, RootDatabase};
+use ide::{Analysis, AnnotationConfig, DiagnosticsConfig, InlayHintsConfig, LineCol, RootDatabase};
use ide_db::{
base_db::{
salsa::{self, debug::DebugQueryTable, ParallelDatabase},
@@ -24,20 +24,20 @@ use ide_db::{
LineIndexDatabase,
};
use itertools::Itertools;
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use oorandom::Rand32;
use profile::{Bytes, StopWatch};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
use syntax::{AstNode, SyntaxNode};
-use vfs::{AbsPathBuf, Vfs, VfsPath};
+use vfs::{AbsPathBuf, FileId, Vfs, VfsPath};
use crate::cli::{
flags::{self, OutputFormat},
- load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice},
- print_memory_usage,
+ full_name_of_item, print_memory_usage,
progress_report::ProgressReport,
- report_metric, Result, Verbosity,
+ report_metric, Verbosity,
};
/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
@@ -49,7 +49,7 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
}
impl flags::AnalysisStats {
- pub fn run(self, verbosity: Verbosity) -> Result<()> {
+ pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
let mut rng = {
let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64;
Rand32::new(seed)
@@ -95,17 +95,41 @@ impl flags::AnalysisStats {
eprintln!(")");
let mut analysis_sw = self.stop_watch();
- let mut num_crates = 0;
- let mut visited_modules = FxHashSet::default();
- let mut visit_queue = Vec::new();
let mut krates = Crate::all(db);
if self.randomize {
shuffle(&mut rng, &mut krates);
}
+
+ let mut item_tree_sw = self.stop_watch();
+ let mut num_item_trees = 0;
+ let source_roots =
+ krates.iter().cloned().map(|krate| db.file_source_root(krate.root_file(db))).unique();
+ for source_root_id in source_roots {
+ let source_root = db.source_root(source_root_id);
+ if !source_root.is_library || self.with_deps {
+ for file_id in source_root.iter() {
+ if let Some(p) = source_root.path_for_file(&file_id) {
+ if let Some((_, Some("rs"))) = p.name_and_extension() {
+ db.file_item_tree(file_id.into());
+ num_item_trees += 1;
+ }
+ }
+ }
+ }
+ }
+ eprintln!(" item trees: {num_item_trees}");
+ let item_tree_time = item_tree_sw.elapsed();
+ eprintln!("{:<20} {}", "Item Tree Collection:", item_tree_time);
+ report_metric("item tree time", item_tree_time.time.as_millis() as u64, "ms");
+
+ let mut crate_def_map_sw = self.stop_watch();
+ let mut num_crates = 0;
+ let mut visited_modules = FxHashSet::default();
+ let mut visit_queue = Vec::new();
for krate in krates {
- let module = krate.root_module(db);
- let file_id = module.definition_source(db).file_id;
+ let module = krate.root_module();
+ let file_id = module.definition_source_file_id(db);
let file_id = file_id.original_file(db);
let source_root = db.file_source_root(file_id);
let source_root = db.source_root(source_root);
@@ -124,8 +148,10 @@ impl flags::AnalysisStats {
let mut bodies = Vec::new();
let mut adts = Vec::new();
let mut consts = Vec::new();
+ let mut file_ids = Vec::new();
while let Some(module) = visit_queue.pop() {
if visited_modules.insert(module) {
+ file_ids.extend(module.as_source_file_id(db));
visit_queue.extend(module.children(db));
for decl in module.declarations(db) {
@@ -171,7 +197,9 @@ impl flags::AnalysisStats {
adts.len(),
consts.len(),
);
- eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());
+ let crate_def_map_time = crate_def_map_sw.elapsed();
+ eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time);
+ report_metric("crate def map time", crate_def_map_time.time.as_millis() as u64, "ms");
if self.randomize {
shuffle(&mut rng, &mut bodies);
@@ -197,6 +225,10 @@ impl flags::AnalysisStats {
self.run_const_eval(db, &consts, verbosity);
}
+ if self.run_all_ide_things {
+ self.run_ide_things(host.analysis(), file_ids);
+ }
+
let total_span = analysis_sw.elapsed();
eprintln!("{:<20} {total_span}", "Total:");
report_metric("total time", total_span.time.as_millis() as u64, "ms");
@@ -242,21 +274,15 @@ impl flags::AnalysisStats {
continue;
}
all += 1;
- let Err(e)
- = db.layout_of_adt(hir_def::AdtId::from(a).into(), Substitution::empty(Interner), a.krate(db).into())
- else {
- continue
+ let Err(e) = db.layout_of_adt(
+ hir_def::AdtId::from(a).into(),
+ Substitution::empty(Interner),
+ db.trait_environment(a.into()),
+ ) else {
+ continue;
};
if verbosity.is_spammy() {
- let full_name = a
- .module(db)
- .path_to_root(db)
- .into_iter()
- .rev()
- .filter_map(|it| it.name(db))
- .chain(Some(a.name(db)))
- .map(|it| it.display(db).to_string())
- .join("::");
+ let full_name = full_name_of_item(db, a.module(db), a.name(db));
println!("Data layout for {full_name} failed due {e:?}");
}
fail += 1;
@@ -278,15 +304,8 @@ impl flags::AnalysisStats {
continue;
};
if verbosity.is_spammy() {
- let full_name = c
- .module(db)
- .path_to_root(db)
- .into_iter()
- .rev()
- .filter_map(|it| it.name(db))
- .chain(c.name(db))
- .map(|it| it.display(db).to_string())
- .join("::");
+ let full_name =
+ full_name_of_item(db, c.module(db), c.name(db).unwrap_or(Name::missing()));
println!("Const eval for {full_name} failed due {e:?}");
}
fail += 1;
@@ -717,6 +736,83 @@ impl flags::AnalysisStats {
report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms");
}
+ fn run_ide_things(&self, analysis: Analysis, mut file_ids: Vec<FileId>) {
+ file_ids.sort();
+ file_ids.dedup();
+ let mut sw = self.stop_watch();
+
+ for &file_id in &file_ids {
+ _ = analysis.diagnostics(
+ &DiagnosticsConfig {
+ enabled: true,
+ proc_macros_enabled: true,
+ proc_attr_macros_enabled: true,
+ disable_experimental: false,
+ disabled: Default::default(),
+ expr_fill_default: Default::default(),
+ insert_use: ide_db::imports::insert_use::InsertUseConfig {
+ granularity: ide_db::imports::insert_use::ImportGranularity::Crate,
+ enforce_granularity: true,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ group: true,
+ skip_glob_imports: true,
+ },
+ prefer_no_std: Default::default(),
+ },
+ ide::AssistResolveStrategy::All,
+ file_id,
+ );
+ }
+ for &file_id in &file_ids {
+ _ = analysis.inlay_hints(
+ &InlayHintsConfig {
+ render_colons: false,
+ type_hints: true,
+ discriminant_hints: ide::DiscriminantHints::Always,
+ parameter_hints: true,
+ chaining_hints: true,
+ adjustment_hints: ide::AdjustmentHints::Always,
+ adjustment_hints_mode: ide::AdjustmentHintsMode::Postfix,
+ adjustment_hints_hide_outside_unsafe: false,
+ closure_return_type_hints: ide::ClosureReturnTypeHints::Always,
+ closure_capture_hints: true,
+ binding_mode_hints: true,
+ lifetime_elision_hints: ide::LifetimeElisionHints::Always,
+ param_names_for_lifetime_elision_hints: true,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ closure_style: hir::ClosureStyle::ImplFn,
+ max_length: Some(25),
+ closing_brace_hints_min_lines: Some(20),
+ },
+ file_id,
+ None,
+ );
+ }
+ for &file_id in &file_ids {
+ analysis
+ .annotations(
+ &AnnotationConfig {
+ binary_target: true,
+ annotate_runnables: true,
+ annotate_impls: true,
+ annotate_references: false,
+ annotate_method_references: false,
+ annotate_enum_variant_references: false,
+ location: ide::AnnotationLocation::AboveName,
+ },
+ file_id,
+ )
+ .unwrap()
+ .into_iter()
+ .for_each(|annotation| {
+ _ = analysis.resolve_annotation(annotation);
+ });
+ }
+ let ide_time = sw.elapsed();
+ eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len());
+ }
+
fn stop_watch(&self) -> StopWatch {
StopWatch::start().memory(self.memory_usage)
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index 4306d7212..8541be715 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -7,11 +7,9 @@ use rustc_hash::FxHashSet;
use hir::{db::HirDatabase, Crate, Module};
use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity};
use ide_db::base_db::SourceDatabaseExt;
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
-use crate::cli::{
- flags,
- load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice},
-};
+use crate::cli::flags;
impl flags::Diagnostics {
pub fn run(self) -> anyhow::Result<()> {
@@ -37,14 +35,14 @@ impl flags::Diagnostics {
let mut visited_files = FxHashSet::default();
let work = all_modules(db).into_iter().filter(|module| {
- let file_id = module.definition_source(db).file_id.original_file(db);
+ let file_id = module.definition_source_file_id(db).original_file(db);
let source_root = db.file_source_root(file_id);
let source_root = db.source_root(source_root);
!source_root.is_library
});
for module in work {
- let file_id = module.definition_source(db).file_id.original_file(db);
+ let file_id = module.definition_source_file_id(db).original_file(db);
if !visited_files.contains(&file_id) {
let crate_name =
module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string();
@@ -82,7 +80,7 @@ impl flags::Diagnostics {
fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
let mut worklist: Vec<_> =
- Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ Crate::all(db).into_iter().map(|krate| krate.root_module()).collect();
let mut modules = Vec::new();
while let Some(module) = worklist.pop() {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
index 208a4e6ec..13b7f039b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
@@ -12,7 +12,7 @@ xflags::xflags! {
/// LSP server for the Rust programming language.
///
/// Subcommands and their flags do not provide any stability guarantees and may be removed or
- /// changed without notice. Top-level flags that are not are marked as [Unstable] provide
+ /// changed without notice. Top-level flags that are not marked as [Unstable] provide
/// backwards-compatibility and may be relied on.
cmd rust-analyzer {
/// Verbosity level, can be repeated multiple times.
@@ -88,6 +88,16 @@ xflags::xflags! {
optional --skip-data-layout
/// Skip const evaluation
optional --skip-const-eval
+ /// Runs several IDE features after analysis, including semantic highlighting, diagnostics
+ /// and annotations. This is useful for benchmarking memory usage on a project that has
+ /// been worked on for a bit in a longer running session.
+ optional --run-all-ide-things
+ }
+
+ /// Run unit tests of the project using the mir interpreter
+ cmd run-tests {
+ /// Directory with Cargo.toml.
+ required path: PathBuf
}
cmd diagnostics {
@@ -103,7 +113,7 @@ xflags::xflags! {
}
cmd ssr {
- /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
+ /// A structured search replace rule (`$a.foo($b) ==>> bar($a, $b)`)
repeated rule: SsrRule
}
@@ -147,6 +157,7 @@ pub enum RustAnalyzerCmd {
Symbols(Symbols),
Highlight(Highlight),
AnalysisStats(AnalysisStats),
+ RunTests(RunTests),
Diagnostics(Diagnostics),
Ssr(Ssr),
Search(Search),
@@ -182,16 +193,22 @@ pub struct AnalysisStats {
pub parallel: bool,
pub memory_usage: bool,
pub source_stats: bool,
- pub skip_lowering: bool,
- pub skip_inference: bool,
- pub skip_mir_stats: bool,
- pub skip_data_layout: bool,
- pub skip_const_eval: bool,
pub only: Option<String>,
pub with_deps: bool,
pub no_sysroot: bool,
pub disable_build_scripts: bool,
pub disable_proc_macros: bool,
+ pub skip_lowering: bool,
+ pub skip_inference: bool,
+ pub skip_mir_stats: bool,
+ pub skip_data_layout: bool,
+ pub skip_const_eval: bool,
+ pub run_all_ide_things: bool,
+}
+
+#[derive(Debug)]
+pub struct RunTests {
+ pub path: PathBuf,
}
#[derive(Debug)]
@@ -223,6 +240,7 @@ pub struct Lsif {
#[derive(Debug)]
pub struct Scip {
pub path: PathBuf,
+
pub output: Option<PathBuf>,
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
deleted file mode 100644
index 4e8f99971..000000000
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
+++ /dev/null
@@ -1,205 +0,0 @@
-//! Loads a Cargo project into a static instance of analysis, without support
-//! for incorporating changes.
-use std::path::Path;
-
-use anyhow::{anyhow, Result};
-use crossbeam_channel::{unbounded, Receiver};
-use ide::{AnalysisHost, Change};
-use ide_db::{
- base_db::{CrateGraph, ProcMacros},
- FxHashMap,
-};
-use proc_macro_api::ProcMacroServer;
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
-use triomphe::Arc;
-use vfs::{loader::Handle, AbsPath, AbsPathBuf};
-
-use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig};
-
-// Note: Since this type is used by external tools that use rust-analyzer as a library
-// what otherwise would be `pub(crate)` has to be `pub` here instead.
-pub struct LoadCargoConfig {
- pub load_out_dirs_from_check: bool,
- pub with_proc_macro_server: ProcMacroServerChoice,
- pub prefill_caches: bool,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum ProcMacroServerChoice {
- Sysroot,
- Explicit(AbsPathBuf),
- None,
-}
-
-// Note: Since this function is used by external tools that use rust-analyzer as a library
-// what otherwise would be `pub(crate)` has to be `pub` here instead.
-pub fn load_workspace_at(
- root: &Path,
- cargo_config: &CargoConfig,
- load_config: &LoadCargoConfig,
- progress: &dyn Fn(String),
-) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
- let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
- let root = ProjectManifest::discover_single(&root)?;
- let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
-
- if load_config.load_out_dirs_from_check {
- let build_scripts = workspace.run_build_scripts(cargo_config, progress)?;
- workspace.set_build_scripts(build_scripts)
- }
-
- load_workspace(workspace, &cargo_config.extra_env, load_config)
-}
-
-// Note: Since this function is used by external tools that use rust-analyzer as a library
-// what otherwise would be `pub(crate)` has to be `pub` here instead.
-//
-// The reason both, `load_workspace_at` and `load_workspace` are `pub` is that some of
-// these tools need access to `ProjectWorkspace`, too, which `load_workspace_at` hides.
-pub fn load_workspace(
- ws: ProjectWorkspace,
- extra_env: &FxHashMap<String, String>,
- load_config: &LoadCargoConfig,
-) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
- let (sender, receiver) = unbounded();
- let mut vfs = vfs::Vfs::default();
- let mut loader = {
- let loader =
- vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
- Box::new(loader)
- };
-
- let proc_macro_server = match &load_config.with_proc_macro_server {
- ProcMacroServerChoice::Sysroot => ws
- .find_sysroot_proc_macro_srv()
- .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)),
- ProcMacroServerChoice::Explicit(path) => {
- ProcMacroServer::spawn(path.clone()).map_err(Into::into)
- }
- ProcMacroServerChoice::None => Err(anyhow!("proc macro server disabled")),
- };
-
- let (crate_graph, proc_macros) = ws.to_crate_graph(
- &mut |path: &AbsPath| {
- let contents = loader.load_sync(path);
- let path = vfs::VfsPath::from(path.to_path_buf());
- vfs.set_file_contents(path.clone(), contents);
- vfs.file_id(&path)
- },
- extra_env,
- );
- let proc_macros = {
- let proc_macro_server = match &proc_macro_server {
- Ok(it) => Ok(it),
- Err(e) => Err(e.to_string()),
- };
- proc_macros
- .into_iter()
- .map(|(crate_id, path)| {
- (
- crate_id,
- path.map_or_else(
- |_| Err("proc macro crate is missing dylib".to_owned()),
- |(_, path)| {
- proc_macro_server.as_ref().map_err(Clone::clone).and_then(
- |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]),
- )
- },
- ),
- )
- })
- .collect()
- };
-
- let project_folders = ProjectFolders::new(&[ws], &[]);
- loader.set_config(vfs::loader::Config {
- load: project_folders.load,
- watch: vec![],
- version: 0,
- });
-
- tracing::debug!("crate graph: {:?}", crate_graph);
- let host = load_crate_graph(
- crate_graph,
- proc_macros,
- project_folders.source_root_config,
- &mut vfs,
- &receiver,
- );
-
- if load_config.prefill_caches {
- host.analysis().parallel_prime_caches(1, |_| {})?;
- }
- Ok((host, vfs, proc_macro_server.ok()))
-}
-
-fn load_crate_graph(
- crate_graph: CrateGraph,
- proc_macros: ProcMacros,
- source_root_config: SourceRootConfig,
- vfs: &mut vfs::Vfs,
- receiver: &Receiver<vfs::loader::Message>,
-) -> AnalysisHost {
- let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
- let mut host = AnalysisHost::new(lru_cap);
- let mut analysis_change = Change::new();
-
- host.raw_database_mut().enable_proc_attr_macros();
-
- // wait until Vfs has loaded all roots
- for task in receiver {
- match task {
- vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => {
- if n_done == n_total {
- break;
- }
- }
- vfs::loader::Message::Loaded { files } => {
- for (path, contents) in files {
- vfs.set_file_contents(path.into(), contents);
- }
- }
- }
- }
- let changes = vfs.take_changes();
- for file in changes {
- if file.exists() {
- let contents = vfs.file_contents(file.file_id);
- if let Ok(text) = std::str::from_utf8(contents) {
- analysis_change.change_file(file.file_id, Some(Arc::from(text)))
- }
- }
- }
- let source_roots = source_root_config.partition(vfs);
- analysis_change.set_roots(source_roots);
-
- analysis_change.set_crate_graph(crate_graph);
- analysis_change.set_proc_macros(proc_macros);
-
- host.apply_change(analysis_change);
- host
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- use hir::Crate;
-
- #[test]
- fn test_loading_rust_analyzer() {
- let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
- let cargo_config = CargoConfig::default();
- let load_cargo_config = LoadCargoConfig {
- load_out_dirs_from_check: false,
- with_proc_macro_server: ProcMacroServerChoice::None,
- prefill_caches: false,
- };
- let (host, _vfs, _proc_macro) =
- load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
-
- let n_crates = Crate::all(host.raw_database()).len();
- // RA has quite a few crates, but the exact count doesn't matter
- assert!(n_crates > 20);
- }
-}
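The module removed above is the library entry point that external tools use to load a Cargo project into an `AnalysisHost`; in this release it moves out of the `rust-analyzer` crate, and the later hunks import it from the separate `load_cargo` crate instead. A minimal sketch of how a downstream tool might call it after this change, mirroring the deleted test; the error handling and the progress callback here are illustrative, not part of the patch:

use anyhow::Result;
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use project_model::CargoConfig;

fn analyze(project_root: &std::path::Path) -> Result<()> {
    let cargo_config = CargoConfig::default();
    let load_config = LoadCargoConfig {
        // Run build scripts so OUT_DIR-dependent code resolves.
        load_out_dirs_from_check: true,
        // Use the proc-macro server that ships with the sysroot.
        with_proc_macro_server: ProcMacroServerChoice::Sysroot,
        // Skip cache priming; batch tools that query a lot may want `true`.
        prefill_caches: false,
    };
    let (host, _vfs, _proc_macro_server) =
        load_workspace_at(project_root, &cargo_config, &load_config, &|msg| eprintln!("{msg}"))?;
    println!("loaded {} crates", hir::Crate::all(host.raw_database()).len());
    Ok(())
}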
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
index 7f5d08449..42d180114 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -8,23 +8,22 @@ use ide::{
Analysis, FileId, FileRange, MonikerKind, PackageInformation, RootDatabase, StaticIndex,
StaticIndexedFile, TokenId, TokenStaticData,
};
-use ide_db::LineIndexDatabase;
-
-use ide_db::base_db::salsa::{self, ParallelDatabase};
-use ide_db::line_index::WideEncoding;
+use ide_db::{
+ base_db::salsa::{self, ParallelDatabase},
+ line_index::WideEncoding,
+ LineIndexDatabase,
+};
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use lsp_types::{self, lsif};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use vfs::{AbsPathBuf, Vfs};
-use crate::cli::load_cargo::ProcMacroServerChoice;
-use crate::cli::{
- flags,
- load_cargo::{load_workspace, LoadCargoConfig},
- Result,
+use crate::{
+ cli::flags,
+ line_index::{LineEndings, LineIndex, PositionEncoding},
+ to_proto,
+ version::version,
};
-use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
-use crate::to_proto;
-use crate::version::version;
/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap<DB>(DB);
@@ -49,8 +48,8 @@ struct LsifManager<'a> {
struct Id(i32);
impl From<Id> for lsp_types::NumberOrString {
- fn from(Id(x): Id) -> Self {
- lsp_types::NumberOrString::Number(x)
+ fn from(Id(it): Id) -> Self {
+ lsp_types::NumberOrString::Number(it)
}
}
@@ -89,8 +88,8 @@ impl LsifManager<'_> {
}
fn get_token_id(&mut self, id: TokenId) -> Id {
- if let Some(x) = self.token_map.get(&id) {
- return *x;
+ if let Some(it) = self.token_map.get(&id) {
+ return *it;
}
let result_set_id = self.add_vertex(lsif::Vertex::ResultSet(lsif::ResultSet { key: None }));
self.token_map.insert(id, result_set_id);
@@ -98,8 +97,8 @@ impl LsifManager<'_> {
}
fn get_package_id(&mut self, package_information: PackageInformation) -> Id {
- if let Some(x) = self.package_map.get(&package_information) {
- return *x;
+ if let Some(it) = self.package_map.get(&package_information) {
+ return *it;
}
let pi = package_information.clone();
let result_set_id =
@@ -120,8 +119,8 @@ impl LsifManager<'_> {
}
fn get_range_id(&mut self, id: FileRange) -> Id {
- if let Some(x) = self.range_map.get(&id) {
- return *x;
+ if let Some(it) = self.range_map.get(&id) {
+ return *it;
}
let file_id = id.file_id;
let doc_id = self.get_file_id(file_id);
@@ -143,8 +142,8 @@ impl LsifManager<'_> {
}
fn get_file_id(&mut self, id: FileId) -> Id {
- if let Some(x) = self.file_map.get(&id) {
- return *x;
+ if let Some(it) = self.file_map.get(&id) {
+ return *it;
}
let path = self.vfs.file_path(id);
let path = path.as_path().unwrap();
@@ -217,18 +216,18 @@ impl LsifManager<'_> {
}));
let mut edges = token.references.iter().fold(
HashMap::<_, Vec<lsp_types::NumberOrString>>::new(),
- |mut edges, x| {
+ |mut edges, it| {
let entry =
- edges.entry((x.range.file_id, x.is_definition)).or_insert_with(Vec::new);
- entry.push((*self.range_map.get(&x.range).unwrap()).into());
+ edges.entry((it.range.file_id, it.is_definition)).or_insert_with(Vec::new);
+ entry.push((*self.range_map.get(&it.range).unwrap()).into());
edges
},
);
- for x in token.references {
- if let Some(vertices) = edges.remove(&(x.range.file_id, x.is_definition)) {
+ for it in token.references {
+ if let Some(vertices) = edges.remove(&(it.range.file_id, it.is_definition)) {
self.add_edge(lsif::Edge::Item(lsif::Item {
- document: (*self.file_map.get(&x.range.file_id).unwrap()).into(),
- property: Some(if x.is_definition {
+ document: (*self.file_map.get(&it.range.file_id).unwrap()).into(),
+ property: Some(if it.is_definition {
lsif::ItemKind::Definitions
} else {
lsif::ItemKind::References
@@ -286,7 +285,7 @@ impl LsifManager<'_> {
}
impl flags::Lsif {
- pub fn run(self) -> Result<()> {
+ pub fn run(self) -> anyhow::Result<()> {
eprintln!("Generating LSIF started...");
let now = Instant::now();
let mut cargo_config = CargoConfig::default();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
new file mode 100644
index 000000000..e17041991
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
@@ -0,0 +1,89 @@
+//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
+
+use hir::{Crate, Module};
+use hir_ty::db::HirDatabase;
+use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
+use profile::StopWatch;
+use project_model::{CargoConfig, RustLibSource};
+use syntax::TextRange;
+
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+
+use crate::cli::{flags, full_name_of_item, Result};
+
+impl flags::RunTests {
+ pub fn run(self) -> Result<()> {
+ let mut cargo_config = CargoConfig::default();
+ cargo_config.sysroot = Some(RustLibSource::Discover);
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?;
+ let db = host.raw_database();
+
+ let tests = all_modules(db)
+ .into_iter()
+ .flat_map(|x| x.declarations(db))
+ .filter_map(|x| match x {
+ hir::ModuleDef::Function(f) => Some(f),
+ _ => None,
+ })
+ .filter(|x| x.is_test(db));
+ let span_formatter = |file_id, text_range: TextRange| {
+ let line_col = match db.line_index(file_id).try_line_col(text_range.start()) {
+ None => " (unknown line col)".to_string(),
+ Some(x) => format!("#{}:{}", x.line + 1, x.col),
+ };
+ let path = &db
+ .source_root(db.file_source_root(file_id))
+ .path_for_file(&file_id)
+ .map(|x| x.to_string());
+ let path = path.as_deref().unwrap_or("<unknown file>");
+ format!("file://{path}{line_col}")
+ };
+ let mut pass_count = 0;
+ let mut ignore_count = 0;
+ let mut fail_count = 0;
+ let mut sw_all = StopWatch::start();
+ for test in tests {
+ let full_name = full_name_of_item(db, test.module(db), test.name(db));
+ println!("test {}", full_name);
+ if test.is_ignore(db) {
+ println!("ignored");
+ ignore_count += 1;
+ continue;
+ }
+ let mut sw_one = StopWatch::start();
+ let result = test.eval(db, span_formatter);
+ if result.trim() == "pass" {
+ pass_count += 1;
+ } else {
+ fail_count += 1;
+ }
+ println!("{}", result);
+ eprintln!("{:<20} {}", format!("test {}", full_name), sw_one.elapsed());
+ }
+ println!("{pass_count} passed, {fail_count} failed, {ignore_count} ignored");
+ eprintln!("{:<20} {}", "All tests", sw_all.elapsed());
+ Ok(())
+ }
+}
+
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
+ let mut worklist: Vec<_> = Crate::all(db)
+ .into_iter()
+ .filter(|x| x.origin(db).is_local())
+ .map(|krate| krate.root_module())
+ .collect();
+ let mut modules = Vec::new();
+
+ while let Some(module) = worklist.pop() {
+ modules.push(module);
+ worklist.extend(module.children(db));
+ }
+
+ modules
+}
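`all_modules` above collects every local module with a simple worklist: start from each local crate's root module, pop a module, record it, and push its children. A self-contained sketch of the same pattern over a toy tree, independent of rust-analyzer's `hir` types (the `Node` type is made up purely for illustration):

struct Node {
    name: &'static str,
    children: Vec<Node>,
}

fn all_nodes(root: &Node) -> Vec<&Node> {
    let mut worklist = vec![root];
    let mut out = Vec::new();
    while let Some(node) = worklist.pop() {
        out.push(node);
        // Popping from the back of the Vec makes this a depth-first walk,
        // but any visit order works since we only need the full set.
        worklist.extend(node.children.iter());
    }
    out
}

fn main() {
    let tree = Node {
        name: "crate_root",
        children: vec![
            Node { name: "tests", children: vec![] },
            Node { name: "util", children: vec![] },
        ],
    };
    let visited = all_nodes(&tree);
    assert_eq!(visited.len(), 3);
    println!("visited {} modules, starting at {}", visited.len(), visited[0].name);
}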
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index b0b724bdf..44337f955 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -6,27 +6,23 @@ use std::{
time::Instant,
};
-use crate::{
- cli::load_cargo::ProcMacroServerChoice,
- line_index::{LineEndings, LineIndex, PositionEncoding},
-};
use ide::{
LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
TokenStaticData,
};
use ide_db::LineIndexDatabase;
+use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
use scip::types as scip_types;
use std::env;
-use crate::cli::{
- flags,
- load_cargo::{load_workspace, LoadCargoConfig},
- Result,
+use crate::{
+ cli::flags,
+ line_index::{LineEndings, LineIndex, PositionEncoding},
};
impl flags::Scip {
- pub fn run(self) -> Result<()> {
+ pub fn run(self) -> anyhow::Result<()> {
eprintln!("Generating SCIP start...");
let now = Instant::now();
let mut cargo_config = CargoConfig::default();
@@ -65,7 +61,7 @@ impl flags::Scip {
path.normalize()
.as_os_str()
.to_str()
- .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
+ .ok_or(anyhow::format_err!("Unable to normalize project_root path"))?
),
text_document_encoding: scip_types::TextEncoding::UTF8.into(),
special_fields: Default::default(),
@@ -168,7 +164,7 @@ impl flags::Scip {
let out_path = self.output.unwrap_or_else(|| PathBuf::from(r"index.scip"));
scip::write_message_to_file(out_path, index)
- .map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?;
+ .map_err(|err| anyhow::format_err!("Failed to write scip to file: {}", err))?;
eprintln!("Generating SCIP finished {:?}", now.elapsed());
Ok(())
@@ -276,7 +272,7 @@ mod test {
let change_fixture = ChangeFixture::parse(ra_fixture);
host.raw_database_mut().apply_change(change_fixture.change);
let (file_id, range_or_offset) =
- change_fixture.file_position.expect("expected a marker ($0)");
+ change_fixture.file_position.expect("expected a marker ()");
let offset = range_or_offset.expect_offset();
(host, FilePosition { file_id, offset })
}
@@ -325,7 +321,7 @@ use foo::example_mod::func;
fn main() {
func$0();
}
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod example_mod {
pub fn func() {}
}
@@ -338,7 +334,7 @@ pub mod example_mod {
fn symbol_for_trait() {
check_symbol(
r#"
-//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
pub fn func$0() {}
@@ -353,7 +349,7 @@ pub mod module {
fn symbol_for_trait_constant() {
check_symbol(
r#"
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
const MY_CONST$0: u8;
@@ -368,7 +364,7 @@ pub mod module {
fn symbol_for_trait_type() {
check_symbol(
r#"
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
type MyType$0;
@@ -384,7 +380,7 @@ pub mod module {
fn symbol_for_trait_impl_function() {
check_symbol(
r#"
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub trait MyTrait {
pub fn func() {}
@@ -411,7 +407,7 @@ pub mod module {
fn main() {
let x = St { a$0: 2 };
}
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub struct St {
pub a: i32,
}
@@ -421,6 +417,44 @@ pub mod module {
}
#[test]
+ fn symbol_for_param() {
+ check_symbol(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::example_mod::func;
+fn main() {
+ func(42);
+}
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
+pub mod example_mod {
+ pub fn func(x$0: usize) {}
+}
+"#,
+ "rust-analyzer cargo foo 0.1.0 example_mod/func().(x)",
+ );
+ }
+
+ #[test]
+ fn symbol_for_closure_param() {
+ check_symbol(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::example_mod::func;
+fn main() {
+ func();
+}
+//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
+pub mod example_mod {
+ pub fn func() {
+ let f = |x$0: usize| {};
+ }
+}
+"#,
+ "rust-analyzer cargo foo 0.1.0 example_mod/func().(x)",
+ );
+ }
+
+ #[test]
fn local_symbol_for_local() {
check_symbol(
r#"
@@ -429,7 +463,7 @@ pub mod module {
fn main() {
func();
}
- //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
pub mod module {
pub fn func() {
let x$0 = 2;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index 82a769347..f87dcb889 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -1,16 +1,14 @@
//! Applies structured search replace rules from the command line.
+use anyhow::Context;
use ide_ssr::MatchFinder;
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use project_model::{CargoConfig, RustLibSource};
-use crate::cli::{
- flags,
- load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice},
- Result,
-};
+use crate::cli::flags;
impl flags::Ssr {
- pub fn run(self) -> Result<()> {
+ pub fn run(self) -> anyhow::Result<()> {
use ide_db::base_db::SourceDatabaseExt;
let mut cargo_config = CargoConfig::default();
cargo_config.sysroot = Some(RustLibSource::Discover);
@@ -35,7 +33,8 @@ impl flags::Ssr {
if let Some(path) = vfs.file_path(file_id).as_path() {
let mut contents = db.file_text(file_id).to_string();
edit.apply(&mut contents);
- std::fs::write(path, contents)?;
+ std::fs::write(path, contents)
+ .with_context(|| format!("failed to write {path}"))?;
}
}
Ok(())
@@ -46,7 +45,7 @@ impl flags::Search {
/// Searches for `patterns`, printing debug information for any nodes whose text exactly matches
 /// `debug_snippet`. This is intended for debugging and probably isn't, in its current form, useful
/// for much else.
- pub fn run(self) -> Result<()> {
+ pub fn run(self) -> anyhow::Result<()> {
use ide_db::base_db::SourceDatabaseExt;
use ide_db::symbol_index::SymbolsDatabase;
let cargo_config = CargoConfig::default();
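The only behavioral change in the ssr.rs hunk above is the added `.with_context(...)`: instead of surfacing a bare I/O error when writing a rewritten file fails, the error now carries the offending path. A self-contained sketch of the same `anyhow::Context` pattern (path and message are illustrative; the real code formats an `AbsPath` directly):

use anyhow::Context;

fn write_back(path: &std::path::Path, contents: &str) -> anyhow::Result<()> {
    // `with_context` is lazy: the message is only formatted if the write fails.
    std::fs::write(path, contents)
        .with_context(|| format!("failed to write {}", path.display()))?;
    Ok(())
}

fn main() -> anyhow::Result<()> {
    write_back(std::path::Path::new("/tmp/ssr-demo.txt"), "edited contents\n")
}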
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 6355c620f..fa20c796e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -1079,6 +1079,7 @@ impl Config {
pub fn diagnostics(&self) -> DiagnosticsConfig {
DiagnosticsConfig {
+ enabled: self.data.diagnostics_enable,
proc_attr_macros_enabled: self.expand_proc_attr_macros(),
proc_macros_enabled: self.data.procMacro_enable,
disable_experimental: !self.data.diagnostics_experimental_enable,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
index 4e57c6eb6..5e5cd9a02 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs
@@ -10,7 +10,7 @@ use crate::{
global_state::{GlobalState, GlobalStateSnapshot},
main_loop::Task,
version::version,
- LspError, Result,
+ LspError,
};
/// A visitor for routing a raw JSON request to an appropriate handler function.
@@ -32,13 +32,13 @@ pub(crate) struct RequestDispatcher<'a> {
pub(crate) global_state: &'a mut GlobalState,
}
-impl<'a> RequestDispatcher<'a> {
+impl RequestDispatcher<'_> {
/// Dispatches the request onto the current thread, given full access to
/// mutable global state. Unlike all other methods here, this one isn't
/// guarded by `catch_unwind`, so, please, don't make bugs :-)
pub(crate) fn on_sync_mut<R>(
&mut self,
- f: fn(&mut GlobalState, R::Params) -> Result<R::Result>,
+ f: fn(&mut GlobalState, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request,
@@ -63,7 +63,7 @@ impl<'a> RequestDispatcher<'a> {
/// Dispatches the request onto the current thread.
pub(crate) fn on_sync<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request,
@@ -92,7 +92,7 @@ impl<'a> RequestDispatcher<'a> {
/// without retrying it if it panics.
pub(crate) fn on_no_retry<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -128,7 +128,7 @@ impl<'a> RequestDispatcher<'a> {
/// Dispatches a non-latency-sensitive request onto the thread pool.
pub(crate) fn on<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -141,7 +141,7 @@ impl<'a> RequestDispatcher<'a> {
/// Dispatches a latency-sensitive request onto the thread pool.
pub(crate) fn on_latency_sensitive<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -156,7 +156,7 @@ impl<'a> RequestDispatcher<'a> {
/// We can't run this on the main thread though as we invoke rustfmt which may take arbitrary time to complete!
pub(crate) fn on_fmt_thread<R>(
&mut self,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -181,7 +181,7 @@ impl<'a> RequestDispatcher<'a> {
fn on_with_thread_intent<const MAIN_POOL: bool, R>(
&mut self,
intent: ThreadIntent,
- f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>,
+ f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
) -> &mut Self
where
R: lsp_types::request::Request + 'static,
@@ -245,7 +245,7 @@ impl<'a> RequestDispatcher<'a> {
fn thread_result_to_response<R>(
id: lsp_server::RequestId,
- result: thread::Result<Result<R::Result>>,
+ result: thread::Result<anyhow::Result<R::Result>>,
) -> Result<lsp_server::Response, Cancelled>
where
R: lsp_types::request::Request,
@@ -277,7 +277,7 @@ where
fn result_to_response<R>(
id: lsp_server::RequestId,
- result: Result<R::Result>,
+ result: anyhow::Result<R::Result>,
) -> Result<lsp_server::Response, Cancelled>
where
R: lsp_types::request::Request,
@@ -289,7 +289,7 @@ where
Err(e) => match e.downcast::<LspError>() {
Ok(lsp_error) => lsp_server::Response::new_err(id, lsp_error.code, lsp_error.message),
Err(e) => match e.downcast::<Cancelled>() {
- Ok(cancelled) => return Err(*cancelled),
+ Ok(cancelled) => return Err(cancelled),
Err(e) => lsp_server::Response::new_err(
id,
lsp_server::ErrorCode::InternalError as i32,
@@ -306,11 +306,11 @@ pub(crate) struct NotificationDispatcher<'a> {
pub(crate) global_state: &'a mut GlobalState,
}
-impl<'a> NotificationDispatcher<'a> {
+impl NotificationDispatcher<'_> {
pub(crate) fn on_sync_mut<N>(
&mut self,
- f: fn(&mut GlobalState, N::Params) -> Result<()>,
- ) -> Result<&mut Self>
+ f: fn(&mut GlobalState, N::Params) -> anyhow::Result<()>,
+ ) -> anyhow::Result<&mut Self>
where
N: lsp_types::notification::Notification,
N::Params: DeserializeOwned + Send,
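Across `dispatch.rs` (and the rest of this patch) the crate-local `Result` alias is replaced by explicit `anyhow::Result` in every handler signature; `result_to_response` then recovers the more specific error kinds by downcasting the `anyhow::Error`. A standalone sketch of that downcast step, with `LspError` stubbed out locally (the real type lives in the crate):

use std::fmt;

#[derive(Debug)]
struct LspError {
    code: i32,
    message: String,
}

impl fmt::Display for LspError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "lsp error {}: {}", self.code, self.message)
    }
}

impl std::error::Error for LspError {}

// Mirrors the shape of `result_to_response`: try the specific error first,
// then fall back to a generic internal error.
fn classify(e: anyhow::Error) -> String {
    match e.downcast::<LspError>() {
        Ok(lsp_error) => format!("respond with code {}", lsp_error.code),
        Err(e) => format!("internal error: {e}"),
    }
}

fn main() {
    let err = anyhow::Error::new(LspError { code: -32603, message: "boom".into() });
    println!("{}", classify(err));
    println!("{}", classify(anyhow::format_err!("some other failure")));
}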
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
index cd74a5500..c247e1bb2 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs
@@ -14,19 +14,21 @@ use crate::{
line_index::{LineIndex, PositionEncoding},
lsp_ext,
lsp_utils::invalid_params_error,
- Result,
};
-pub(crate) fn abs_path(url: &lsp_types::Url) -> Result<AbsPathBuf> {
- let path = url.to_file_path().map_err(|()| "url is not a file")?;
+pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> {
+ let path = url.to_file_path().map_err(|()| anyhow::format_err!("url is not a file"))?;
Ok(AbsPathBuf::try_from(path).unwrap())
}
-pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
+pub(crate) fn vfs_path(url: &lsp_types::Url) -> anyhow::Result<vfs::VfsPath> {
abs_path(url).map(vfs::VfsPath::from)
}
-pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
+pub(crate) fn offset(
+ line_index: &LineIndex,
+ position: lsp_types::Position,
+) -> anyhow::Result<TextSize> {
let line_col = match line_index.encoding {
PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character },
PositionEncoding::Wide(enc) => {
@@ -42,7 +44,10 @@ pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> R
Ok(text_size)
}
-pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Result<TextRange> {
+pub(crate) fn text_range(
+ line_index: &LineIndex,
+ range: lsp_types::Range,
+) -> anyhow::Result<TextRange> {
let start = offset(line_index, range.start)?;
let end = offset(line_index, range.end)?;
match end < start {
@@ -51,14 +56,14 @@ pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Res
}
}
-pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> {
+pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> anyhow::Result<FileId> {
snap.url_to_file_id(url)
}
pub(crate) fn file_position(
snap: &GlobalStateSnapshot,
tdpp: lsp_types::TextDocumentPositionParams,
-) -> Result<FilePosition> {
+) -> anyhow::Result<FilePosition> {
let file_id = file_id(snap, &tdpp.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
let offset = offset(&line_index, tdpp.position)?;
@@ -69,7 +74,7 @@ pub(crate) fn file_range(
snap: &GlobalStateSnapshot,
text_document_identifier: lsp_types::TextDocumentIdentifier,
range: lsp_types::Range,
-) -> Result<FileRange> {
+) -> anyhow::Result<FileRange> {
file_range_uri(snap, &text_document_identifier.uri, range)
}
@@ -77,7 +82,7 @@ pub(crate) fn file_range_uri(
snap: &GlobalStateSnapshot,
document: &lsp_types::Url,
range: lsp_types::Range,
-) -> Result<FileRange> {
+) -> anyhow::Result<FileRange> {
let file_id = file_id(snap, document)?;
let line_index = snap.file_line_index(file_id)?;
let range = text_range(&line_index, range)?;
@@ -101,7 +106,7 @@ pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind>
pub(crate) fn annotation(
snap: &GlobalStateSnapshot,
code_lens: lsp_types::CodeLens,
-) -> Result<Option<Annotation>> {
+) -> anyhow::Result<Option<Annotation>> {
let data =
code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_string()))?;
let resolve = from_json::<lsp_ext::CodeLensResolveData>("CodeLensResolveData", &data)?;
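`from_proto::offset` is the one conversion in this file that does real work: LSP positions arrive as a line plus a character, where the character may count UTF-16 code units, and they have to become byte offsets into UTF-8 text. A standalone illustration of why the encoding matters, without the crate's `LineIndex` type (hypothetical helper, assumes the client sends UTF-16 positions):

// Convert an LSP-style (line, character) position into a byte offset,
// where `character` counts UTF-16 code units as most LSP clients do.
fn utf16_position_to_byte_offset(text: &str, line: u32, character: u32) -> Option<usize> {
    // Byte offset of the start of the requested line.
    let line_start = text
        .split_inclusive('\n')
        .take(line as usize)
        .map(str::len)
        .sum::<usize>();
    // The requested line only (including its trailing newline, if any).
    let line_text = text.get(line_start..)?.split_inclusive('\n').next().unwrap_or("");
    let mut utf16_seen = 0u32;
    for (byte_idx, ch) in line_text.char_indices() {
        if utf16_seen >= character {
            return Some(line_start + byte_idx);
        }
        utf16_seen += ch.len_utf16() as u32;
    }
    (utf16_seen >= character).then(|| line_start + line_text.len())
}

fn main() {
    // '€' is three bytes in UTF-8 but a single UTF-16 code unit,
    // so LSP character 2 on line 0 lands at byte offset 4.
    let text = "a€b\nsecond line";
    assert_eq!(utf16_position_to_byte_offset(text, 0, 2), Some(4));
    assert_eq!(utf16_position_to_byte_offset(text, 1, 0), Some(6));
}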
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index d5b0e3a57..ea8a69751 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -9,6 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase};
+use load_cargo::SourceRootConfig;
use lsp_types::{SemanticTokens, Url};
use nohash_hasher::IntMap;
use parking_lot::{Mutex, RwLock};
@@ -27,10 +28,9 @@ use crate::{
main_loop::Task,
mem_docs::MemDocs,
op_queue::OpQueue,
- reload::{self, SourceRootConfig},
+ reload,
task_pool::TaskPool,
to_proto::url_from_abs_path,
- Result,
};
// Enforces drop order
@@ -319,7 +319,7 @@ impl GlobalState {
// crate see https://github.com/rust-lang/rust-analyzer/issues/13029
if let Some((path, force_crate_graph_reload)) = workspace_structure_change {
self.fetch_workspaces_queue.request_op(
- format!("workspace vfs file change: {}", path.display()),
+ format!("workspace vfs file change: {path}"),
force_crate_graph_reload,
);
}
@@ -422,7 +422,7 @@ impl Drop for GlobalState {
}
impl GlobalStateSnapshot {
- pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
+ pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<FileId> {
url_to_file_id(&self.vfs.read().0, url)
}
@@ -481,8 +481,8 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
url_from_abs_path(path)
}
-pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
+pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<FileId> {
let path = from_proto::vfs_path(url)?;
- let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {path}"))?;
+ let res = vfs.file_id(&path).ok_or_else(|| anyhow::format_err!("file not found: {path}"))?;
Ok(res)
}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
index ae1dc2315..e830e5e9a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
@@ -14,10 +14,10 @@ use vfs::{AbsPathBuf, ChangeKind, VfsPath};
use crate::{
config::Config, from_proto, global_state::GlobalState, lsp_ext::RunFlycheckParams,
- lsp_utils::apply_document_changes, mem_docs::DocumentData, reload, Result,
+ lsp_utils::apply_document_changes, mem_docs::DocumentData, reload,
};
-pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Result<()> {
+pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> anyhow::Result<()> {
let id: lsp_server::RequestId = match params.id {
lsp_types::NumberOrString::Number(id) => id.into(),
lsp_types::NumberOrString::String(id) => id.into(),
@@ -29,7 +29,7 @@ pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Re
pub(crate) fn handle_work_done_progress_cancel(
state: &mut GlobalState,
params: WorkDoneProgressCancelParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
if let lsp_types::NumberOrString::String(s) = &params.token {
if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") {
if let Ok(id) = u32::from_str_radix(id, 10) {
@@ -49,7 +49,7 @@ pub(crate) fn handle_work_done_progress_cancel(
pub(crate) fn handle_did_open_text_document(
state: &mut GlobalState,
params: DidOpenTextDocumentParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let _p = profile::span("handle_did_open_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
@@ -68,7 +68,7 @@ pub(crate) fn handle_did_open_text_document(
pub(crate) fn handle_did_change_text_document(
state: &mut GlobalState,
params: DidChangeTextDocumentParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let _p = profile::span("handle_did_change_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
@@ -100,7 +100,7 @@ pub(crate) fn handle_did_change_text_document(
pub(crate) fn handle_did_close_text_document(
state: &mut GlobalState,
params: DidCloseTextDocumentParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let _p = profile::span("handle_did_close_text_document");
if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
@@ -120,14 +120,14 @@ pub(crate) fn handle_did_close_text_document(
pub(crate) fn handle_did_save_text_document(
state: &mut GlobalState,
params: DidSaveTextDocumentParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) {
state
.fetch_workspaces_queue
- .request_op(format!("DidSaveTextDocument {}", abs_path.display()), false);
+ .request_op(format!("DidSaveTextDocument {abs_path}"), false);
}
}
@@ -146,7 +146,7 @@ pub(crate) fn handle_did_save_text_document(
pub(crate) fn handle_did_change_configuration(
state: &mut GlobalState,
_params: DidChangeConfigurationParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
// As stated in https://github.com/microsoft/language-server-protocol/issues/676,
// this notification's parameters should be ignored and the actual config queried separately.
state.send_request::<lsp_types::request::WorkspaceConfiguration>(
@@ -186,7 +186,7 @@ pub(crate) fn handle_did_change_configuration(
pub(crate) fn handle_did_change_workspace_folders(
state: &mut GlobalState,
params: DidChangeWorkspaceFoldersParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let config = Arc::make_mut(&mut state.config);
for workspace in params.event.removed {
@@ -214,7 +214,7 @@ pub(crate) fn handle_did_change_workspace_folders(
pub(crate) fn handle_did_change_watched_files(
state: &mut GlobalState,
params: DidChangeWatchedFilesParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
for change in params.changes {
if let Ok(path) = from_proto::abs_path(&change.uri) {
state.loader.handle.invalidate(path);
@@ -302,13 +302,13 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
}
}
-pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
let _p = profile::span("handle_stop_flycheck");
state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
Ok(())
}
-pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
let _p = profile::span("handle_clear_flycheck");
state.diagnostics.clear_check_all();
Ok(())
@@ -317,7 +317,7 @@ pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> Result<()
pub(crate) fn handle_run_flycheck(
state: &mut GlobalState,
params: RunFlycheckParams,
-) -> Result<()> {
+) -> anyhow::Result<()> {
let _p = profile::span("handle_run_flycheck");
if let Some(text_document) = params.text_document {
if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) {
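One detail worth noting from `handle_work_done_progress_cancel` in this file: flycheck progress tokens are plain strings of the form `rust-analyzer/flycheck/<id>`, and cancellation simply parses the numeric id back out of the token. A self-contained sketch of that round trip (the prefix comes from the handler above; the constructor side is an assumption for illustration):

fn flycheck_token(id: u32) -> String {
    format!("rust-analyzer/flycheck/{id}")
}

fn parse_flycheck_token(token: &str) -> Option<u32> {
    // `str::parse` is the idiomatic spelling of `u32::from_str_radix(.., 10)`.
    token.strip_prefix("rust-analyzer/flycheck/")?.parse().ok()
}

fn main() {
    let token = flycheck_token(3);
    assert_eq!(parse_flycheck_token(&token), Some(3));
    assert_eq!(parse_flycheck_token("rust-analyzer/other/3"), None);
}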
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
index a6a72552d..5f1f731cf 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
@@ -8,6 +8,7 @@ use std::{
};
use anyhow::Context;
+
use ide::{
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind,
@@ -20,9 +21,9 @@ use lsp_types::{
CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
CodeLens, CompletionItem, FoldingRange, FoldingRangeParams, HoverContents, InlayHint,
InlayHintParams, Location, LocationLink, Position, PrepareRenameResponse, Range, RenameParams,
- SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
- SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
- SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+ ResourceOp, ResourceOperationKind, SemanticTokensDeltaParams, SemanticTokensFullDeltaResult,
+ SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
+ SemanticTokensResult, SymbolInformation, SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
};
use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
use serde_json::json;
@@ -33,7 +34,7 @@ use vfs::{AbsPath, AbsPathBuf, VfsPath};
use crate::{
cargo_target_spec::CargoTargetSpec,
- config::{RustfmtConfig, WorkspaceSymbolConfig},
+ config::{Config, RustfmtConfig, WorkspaceSymbolConfig},
diff::diff,
from_proto,
global_state::{GlobalState, GlobalStateSnapshot},
@@ -43,10 +44,10 @@ use crate::{
FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams,
},
lsp_utils::{all_edits_are_disjoint, invalid_params_error},
- to_proto, LspError, Result,
+ to_proto, LspError,
};
-pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
// FIXME: use `Arc::from_iter` when it becomes available
state.proc_macro_clients = Arc::from(Vec::new());
state.proc_macro_changed = false;
@@ -55,7 +56,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<
Ok(())
}
-pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
// FIXME: use `Arc::from_iter` when it becomes available
state.proc_macro_clients = Arc::from(Vec::new());
state.proc_macro_changed = false;
@@ -67,7 +68,7 @@ pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> Resu
pub(crate) fn handle_analyzer_status(
snap: GlobalStateSnapshot,
params: lsp_ext::AnalyzerStatusParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_analyzer_status");
let mut buf = String::new();
@@ -112,7 +113,7 @@ pub(crate) fn handle_analyzer_status(
Ok(buf)
}
-pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
+pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> anyhow::Result<String> {
let _p = profile::span("handle_memory_usage");
let mem = state.analysis_host.per_query_memory_usage();
@@ -125,7 +126,7 @@ pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<Stri
Ok(out)
}
-pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Result<()> {
+pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
state.analysis_host.shuffle_crate_graph();
Ok(())
}
@@ -133,7 +134,7 @@ pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Resu
pub(crate) fn handle_syntax_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::SyntaxTreeParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_syntax_tree");
let id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(id)?;
@@ -145,7 +146,7 @@ pub(crate) fn handle_syntax_tree(
pub(crate) fn handle_view_hir(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_view_hir");
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.view_hir(position)?;
@@ -155,7 +156,7 @@ pub(crate) fn handle_view_hir(
pub(crate) fn handle_view_mir(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_view_mir");
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.view_mir(position)?;
@@ -165,7 +166,7 @@ pub(crate) fn handle_view_mir(
pub(crate) fn handle_interpret_function(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_interpret_function");
let position = from_proto::file_position(&snap, params)?;
let res = snap.analysis.interpret_function(position)?;
@@ -175,7 +176,7 @@ pub(crate) fn handle_interpret_function(
pub(crate) fn handle_view_file_text(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentIdentifier,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let file_id = from_proto::file_id(&snap, &params.uri)?;
Ok(snap.analysis.file_text(file_id)?.to_string())
}
@@ -183,7 +184,7 @@ pub(crate) fn handle_view_file_text(
pub(crate) fn handle_view_item_tree(
snap: GlobalStateSnapshot,
params: lsp_ext::ViewItemTreeParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_view_item_tree");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let res = snap.analysis.view_item_tree(file_id)?;
@@ -193,16 +194,16 @@ pub(crate) fn handle_view_item_tree(
pub(crate) fn handle_view_crate_graph(
snap: GlobalStateSnapshot,
params: ViewCrateGraphParams,
-) -> Result<String> {
+) -> anyhow::Result<String> {
let _p = profile::span("handle_view_crate_graph");
- let dot = snap.analysis.view_crate_graph(params.full)??;
+ let dot = snap.analysis.view_crate_graph(params.full)?.map_err(anyhow::Error::msg)?;
Ok(dot)
}
pub(crate) fn handle_expand_macro(
snap: GlobalStateSnapshot,
params: lsp_ext::ExpandMacroParams,
-) -> Result<Option<lsp_ext::ExpandedMacro>> {
+) -> anyhow::Result<Option<lsp_ext::ExpandedMacro>> {
let _p = profile::span("handle_expand_macro");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -215,11 +216,11 @@ pub(crate) fn handle_expand_macro(
pub(crate) fn handle_selection_range(
snap: GlobalStateSnapshot,
params: lsp_types::SelectionRangeParams,
-) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::SelectionRange>>> {
let _p = profile::span("handle_selection_range");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
- let res: Result<Vec<lsp_types::SelectionRange>> = params
+ let res: anyhow::Result<Vec<lsp_types::SelectionRange>> = params
.positions
.into_iter()
.map(|position| {
@@ -258,7 +259,7 @@ pub(crate) fn handle_selection_range(
pub(crate) fn handle_matching_brace(
snap: GlobalStateSnapshot,
params: lsp_ext::MatchingBraceParams,
-) -> Result<Vec<Position>> {
+) -> anyhow::Result<Vec<Position>> {
let _p = profile::span("handle_matching_brace");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -281,7 +282,7 @@ pub(crate) fn handle_matching_brace(
pub(crate) fn handle_join_lines(
snap: GlobalStateSnapshot,
params: lsp_ext::JoinLinesParams,
-) -> Result<Vec<lsp_types::TextEdit>> {
+) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
let _p = profile::span("handle_join_lines");
let config = snap.config.join_lines();
@@ -306,7 +307,7 @@ pub(crate) fn handle_join_lines(
pub(crate) fn handle_on_enter(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
let _p = profile::span("handle_on_enter");
let position = from_proto::file_position(&snap, params)?;
let edit = match snap.analysis.on_enter(position)? {
@@ -321,7 +322,7 @@ pub(crate) fn handle_on_enter(
pub(crate) fn handle_on_type_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentOnTypeFormattingParams,
-) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
let _p = profile::span("handle_on_type_formatting");
let mut position = from_proto::file_position(&snap, params.text_document_position)?;
let line_index = snap.file_line_index(position.file_id)?;
@@ -352,7 +353,8 @@ pub(crate) fn handle_on_type_formatting(
};
// This should be a single-file edit
- let (_, text_edit) = edit.source_file_edits.into_iter().next().unwrap();
+ let (_, (text_edit, snippet_edit)) = edit.source_file_edits.into_iter().next().unwrap();
+ stdx::never!(snippet_edit.is_none(), "on type formatting shouldn't use structured snippets");
let change = to_proto::snippet_text_edit_vec(&line_index, edit.is_snippet, text_edit);
Ok(Some(change))
@@ -361,7 +363,7 @@ pub(crate) fn handle_on_type_formatting(
pub(crate) fn handle_document_symbol(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentSymbolParams,
-) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
+) -> anyhow::Result<Option<lsp_types::DocumentSymbolResponse>> {
let _p = profile::span("handle_document_symbol");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -450,7 +452,7 @@ pub(crate) fn handle_document_symbol(
pub(crate) fn handle_workspace_symbol(
snap: GlobalStateSnapshot,
params: WorkspaceSymbolParams,
-) -> Result<Option<Vec<SymbolInformation>>> {
+) -> anyhow::Result<Option<Vec<SymbolInformation>>> {
let _p = profile::span("handle_workspace_symbol");
let config = snap.config.workspace_symbol();
@@ -513,7 +515,10 @@ pub(crate) fn handle_workspace_symbol(
(all_symbols, libs)
}
- fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
+ fn exec_query(
+ snap: &GlobalStateSnapshot,
+ query: Query,
+ ) -> anyhow::Result<Vec<SymbolInformation>> {
let mut res = Vec::new();
for nav in snap.analysis.symbol_search(query)? {
let container_name = nav.container_name.as_ref().map(|v| v.to_string());
@@ -542,7 +547,7 @@ pub(crate) fn handle_workspace_symbol(
pub(crate) fn handle_will_rename_files(
snap: GlobalStateSnapshot,
params: lsp_types::RenameFilesParams,
-) -> Result<Option<lsp_types::WorkspaceEdit>> {
+) -> anyhow::Result<Option<lsp_types::WorkspaceEdit>> {
let _p = profile::span("handle_will_rename_files");
let source_changes: Vec<SourceChange> = params
@@ -604,7 +609,7 @@ pub(crate) fn handle_will_rename_files(
pub(crate) fn handle_goto_definition(
snap: GlobalStateSnapshot,
params: lsp_types::GotoDefinitionParams,
-) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile::span("handle_goto_definition");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_definition(position)? {
@@ -619,7 +624,7 @@ pub(crate) fn handle_goto_definition(
pub(crate) fn handle_goto_declaration(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoDeclarationParams,
-) -> Result<Option<lsp_types::request::GotoDeclarationResponse>> {
+) -> anyhow::Result<Option<lsp_types::request::GotoDeclarationResponse>> {
let _p = profile::span("handle_goto_declaration");
let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
let nav_info = match snap.analysis.goto_declaration(position)? {
@@ -634,7 +639,7 @@ pub(crate) fn handle_goto_declaration(
pub(crate) fn handle_goto_implementation(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoImplementationParams,
-) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
+) -> anyhow::Result<Option<lsp_types::request::GotoImplementationResponse>> {
let _p = profile::span("handle_goto_implementation");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_implementation(position)? {
@@ -649,7 +654,7 @@ pub(crate) fn handle_goto_implementation(
pub(crate) fn handle_goto_type_definition(
snap: GlobalStateSnapshot,
params: lsp_types::request::GotoTypeDefinitionParams,
-) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
+) -> anyhow::Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
let _p = profile::span("handle_goto_type_definition");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let nav_info = match snap.analysis.goto_type_definition(position)? {
@@ -664,7 +669,7 @@ pub(crate) fn handle_goto_type_definition(
pub(crate) fn handle_parent_module(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile::span("handle_parent_module");
if let Ok(file_path) = &params.text_document.uri.to_file_path() {
if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
@@ -731,7 +736,7 @@ pub(crate) fn handle_parent_module(
pub(crate) fn handle_runnables(
snap: GlobalStateSnapshot,
params: lsp_ext::RunnablesParams,
-) -> Result<Vec<lsp_ext::Runnable>> {
+) -> anyhow::Result<Vec<lsp_ext::Runnable>> {
let _p = profile::span("handle_runnables");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let line_index = snap.file_line_index(file_id)?;
@@ -826,7 +831,7 @@ fn should_skip_for_offset(runnable: &Runnable, offset: Option<TextSize>) -> bool
pub(crate) fn handle_related_tests(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<Vec<lsp_ext::TestInfo>> {
+) -> anyhow::Result<Vec<lsp_ext::TestInfo>> {
let _p = profile::span("handle_related_tests");
let position = from_proto::file_position(&snap, params)?;
@@ -844,7 +849,7 @@ pub(crate) fn handle_related_tests(
pub(crate) fn handle_completion(
snap: GlobalStateSnapshot,
params: lsp_types::CompletionParams,
-) -> Result<Option<lsp_types::CompletionResponse>> {
+) -> anyhow::Result<Option<lsp_types::CompletionResponse>> {
let _p = profile::span("handle_completion");
let text_document_position = params.text_document_position.clone();
let position = from_proto::file_position(&snap, params.text_document_position)?;
@@ -872,7 +877,7 @@ pub(crate) fn handle_completion(
pub(crate) fn handle_completion_resolve(
snap: GlobalStateSnapshot,
mut original_completion: CompletionItem,
-) -> Result<CompletionItem> {
+) -> anyhow::Result<CompletionItem> {
let _p = profile::span("handle_completion_resolve");
if !all_edits_are_disjoint(&original_completion, &[]) {
@@ -928,7 +933,7 @@ pub(crate) fn handle_completion_resolve(
pub(crate) fn handle_folding_range(
snap: GlobalStateSnapshot,
params: FoldingRangeParams,
-) -> Result<Option<Vec<FoldingRange>>> {
+) -> anyhow::Result<Option<Vec<FoldingRange>>> {
let _p = profile::span("handle_folding_range");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let folds = snap.analysis.folding_ranges(file_id)?;
@@ -945,7 +950,7 @@ pub(crate) fn handle_folding_range(
pub(crate) fn handle_signature_help(
snap: GlobalStateSnapshot,
params: lsp_types::SignatureHelpParams,
-) -> Result<Option<lsp_types::SignatureHelp>> {
+) -> anyhow::Result<Option<lsp_types::SignatureHelp>> {
let _p = profile::span("handle_signature_help");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let help = match snap.analysis.signature_help(position)? {
@@ -960,7 +965,7 @@ pub(crate) fn handle_signature_help(
pub(crate) fn handle_hover(
snap: GlobalStateSnapshot,
params: lsp_ext::HoverParams,
-) -> Result<Option<lsp_ext::Hover>> {
+) -> anyhow::Result<Option<lsp_ext::Hover>> {
let _p = profile::span("handle_hover");
let range = match params.position {
PositionOrRange::Position(position) => Range::new(position, position),
@@ -997,7 +1002,7 @@ pub(crate) fn handle_hover(
pub(crate) fn handle_prepare_rename(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<Option<PrepareRenameResponse>> {
+) -> anyhow::Result<Option<PrepareRenameResponse>> {
let _p = profile::span("handle_prepare_rename");
let position = from_proto::file_position(&snap, params)?;
@@ -1011,7 +1016,7 @@ pub(crate) fn handle_prepare_rename(
pub(crate) fn handle_rename(
snap: GlobalStateSnapshot,
params: RenameParams,
-) -> Result<Option<WorkspaceEdit>> {
+) -> anyhow::Result<Option<WorkspaceEdit>> {
let _p = profile::span("handle_rename");
let position = from_proto::file_position(&snap, params.text_document_position)?;
@@ -1027,14 +1032,30 @@ pub(crate) fn handle_rename(
if !change.file_system_edits.is_empty() && snap.config.will_rename() {
change.source_file_edits.clear();
}
+
let workspace_edit = to_proto::workspace_edit(&snap, change)?;
+
+ if let Some(lsp_types::DocumentChanges::Operations(ops)) =
+ workspace_edit.document_changes.as_ref()
+ {
+ for op in ops {
+ if let lsp_types::DocumentChangeOperation::Op(doc_change_op) = op {
+ if let Err(err) =
+ resource_ops_supported(&snap.config, resolve_resource_op(doc_change_op))
+ {
+ return Err(err);
+ }
+ }
+ }
+ }
+
Ok(Some(workspace_edit))
}
pub(crate) fn handle_references(
snap: GlobalStateSnapshot,
params: lsp_types::ReferenceParams,
-) -> Result<Option<Vec<Location>>> {
+) -> anyhow::Result<Option<Vec<Location>>> {
let _p = profile::span("handle_references");
let position = from_proto::file_position(&snap, params.text_document_position)?;
@@ -1077,7 +1098,7 @@ pub(crate) fn handle_references(
pub(crate) fn handle_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentFormattingParams,
-) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile::span("handle_formatting");
run_rustfmt(&snap, params.text_document, None)
@@ -1086,7 +1107,7 @@ pub(crate) fn handle_formatting(
pub(crate) fn handle_range_formatting(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentRangeFormattingParams,
-) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
let _p = profile::span("handle_range_formatting");
run_rustfmt(&snap, params.text_document, Some(params.range))
@@ -1095,7 +1116,7 @@ pub(crate) fn handle_range_formatting(
pub(crate) fn handle_code_action(
snap: GlobalStateSnapshot,
params: lsp_types::CodeActionParams,
-) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
+) -> anyhow::Result<Option<Vec<lsp_ext::CodeAction>>> {
let _p = profile::span("handle_code_action");
if !snap.config.code_action_literals() {
@@ -1134,6 +1155,21 @@ pub(crate) fn handle_code_action(
let resolve_data =
if code_action_resolve_cap { Some((index, params.clone())) } else { None };
let code_action = to_proto::code_action(&snap, assist, resolve_data)?;
+
+ // Check if the client supports the necessary `ResourceOperation`s.
+ let changes = code_action.edit.as_ref().and_then(|it| it.document_changes.as_ref());
+ if let Some(changes) = changes {
+ for change in changes {
+ if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change {
+ if let Err(err) =
+ resource_ops_supported(&snap.config, resolve_resource_op(res_op))
+ {
+ return Err(err);
+ }
+ }
+ }
+ }
+
res.push(code_action)
}
@@ -1158,7 +1194,7 @@ pub(crate) fn handle_code_action(
pub(crate) fn handle_code_action_resolve(
snap: GlobalStateSnapshot,
mut code_action: lsp_ext::CodeAction,
-) -> Result<lsp_ext::CodeAction> {
+) -> anyhow::Result<lsp_ext::CodeAction> {
let _p = profile::span("handle_code_action_resolve");
let params = match code_action.data.take() {
Some(it) => it,
@@ -1216,10 +1252,25 @@ pub(crate) fn handle_code_action_resolve(
let ca = to_proto::code_action(&snap, assist.clone(), None)?;
code_action.edit = ca.edit;
code_action.command = ca.command;
+
+ if let Some(edit) = code_action.edit.as_ref() {
+ if let Some(changes) = edit.document_changes.as_ref() {
+ for change in changes {
+ if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change {
+ if let Err(err) =
+ resource_ops_supported(&snap.config, resolve_resource_op(res_op))
+ {
+ return Err(err);
+ }
+ }
+ }
+ }
+ }
+
Ok(code_action)
}
-fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
+fn parse_action_id(action_id: &str) -> anyhow::Result<(usize, SingleResolve), String> {
let id_parts = action_id.split(':').collect::<Vec<_>>();
match id_parts.as_slice() {
[assist_id_string, assist_kind_string, index_string] => {
@@ -1237,7 +1288,7 @@ fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
pub(crate) fn handle_code_lens(
snap: GlobalStateSnapshot,
params: lsp_types::CodeLensParams,
-) -> Result<Option<Vec<CodeLens>>> {
+) -> anyhow::Result<Option<Vec<CodeLens>>> {
let _p = profile::span("handle_code_lens");
let lens_config = snap.config.lens();
@@ -1280,8 +1331,10 @@ pub(crate) fn handle_code_lens(
pub(crate) fn handle_code_lens_resolve(
snap: GlobalStateSnapshot,
code_lens: CodeLens,
-) -> Result<CodeLens> {
- let Some(annotation) = from_proto::annotation(&snap, code_lens.clone())? else { return Ok(code_lens) };
+) -> anyhow::Result<CodeLens> {
+ let Some(annotation) = from_proto::annotation(&snap, code_lens.clone())? else {
+ return Ok(code_lens);
+ };
let annotation = snap.analysis.resolve_annotation(annotation)?;
let mut acc = Vec::new();
@@ -1301,7 +1354,7 @@ pub(crate) fn handle_code_lens_resolve(
pub(crate) fn handle_document_highlight(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentHighlightParams,
-) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::DocumentHighlight>>> {
let _p = profile::span("handle_document_highlight");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let line_index = snap.file_line_index(position.file_id)?;
@@ -1323,7 +1376,7 @@ pub(crate) fn handle_document_highlight(
pub(crate) fn handle_ssr(
snap: GlobalStateSnapshot,
params: lsp_ext::SsrParams,
-) -> Result<lsp_types::WorkspaceEdit> {
+) -> anyhow::Result<lsp_types::WorkspaceEdit> {
let _p = profile::span("handle_ssr");
let selections = params
.selections
@@ -1343,7 +1396,7 @@ pub(crate) fn handle_ssr(
pub(crate) fn handle_inlay_hints(
snap: GlobalStateSnapshot,
params: InlayHintParams,
-) -> Result<Option<Vec<InlayHint>>> {
+) -> anyhow::Result<Option<Vec<InlayHint>>> {
let _p = profile::span("handle_inlay_hints");
let document_uri = &params.text_document.uri;
let FileRange { file_id, range } = from_proto::file_range(
@@ -1365,7 +1418,7 @@ pub(crate) fn handle_inlay_hints(
pub(crate) fn handle_inlay_hints_resolve(
_snap: GlobalStateSnapshot,
hint: InlayHint,
-) -> Result<InlayHint> {
+) -> anyhow::Result<InlayHint> {
let _p = profile::span("handle_inlay_hints_resolve");
Ok(hint)
}
@@ -1373,7 +1426,7 @@ pub(crate) fn handle_inlay_hints_resolve(
pub(crate) fn handle_call_hierarchy_prepare(
snap: GlobalStateSnapshot,
params: CallHierarchyPrepareParams,
-) -> Result<Option<Vec<CallHierarchyItem>>> {
+) -> anyhow::Result<Option<Vec<CallHierarchyItem>>> {
let _p = profile::span("handle_call_hierarchy_prepare");
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
@@ -1395,7 +1448,7 @@ pub(crate) fn handle_call_hierarchy_prepare(
pub(crate) fn handle_call_hierarchy_incoming(
snap: GlobalStateSnapshot,
params: CallHierarchyIncomingCallsParams,
-) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
+) -> anyhow::Result<Option<Vec<CallHierarchyIncomingCall>>> {
let _p = profile::span("handle_call_hierarchy_incoming");
let item = params.item;
@@ -1430,7 +1483,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
pub(crate) fn handle_call_hierarchy_outgoing(
snap: GlobalStateSnapshot,
params: CallHierarchyOutgoingCallsParams,
-) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
+) -> anyhow::Result<Option<Vec<CallHierarchyOutgoingCall>>> {
let _p = profile::span("handle_call_hierarchy_outgoing");
let item = params.item;
@@ -1465,7 +1518,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
pub(crate) fn handle_semantic_tokens_full(
snap: GlobalStateSnapshot,
params: SemanticTokensParams,
-) -> Result<Option<SemanticTokensResult>> {
+) -> anyhow::Result<Option<SemanticTokensResult>> {
let _p = profile::span("handle_semantic_tokens_full");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
@@ -1495,7 +1548,7 @@ pub(crate) fn handle_semantic_tokens_full(
pub(crate) fn handle_semantic_tokens_full_delta(
snap: GlobalStateSnapshot,
params: SemanticTokensDeltaParams,
-) -> Result<Option<SemanticTokensFullDeltaResult>> {
+) -> anyhow::Result<Option<SemanticTokensFullDeltaResult>> {
let _p = profile::span("handle_semantic_tokens_full_delta");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
@@ -1535,7 +1588,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
pub(crate) fn handle_semantic_tokens_range(
snap: GlobalStateSnapshot,
params: SemanticTokensRangeParams,
-) -> Result<Option<SemanticTokensRangeResult>> {
+) -> anyhow::Result<Option<SemanticTokensRangeResult>> {
let _p = profile::span("handle_semantic_tokens_range");
let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
@@ -1561,7 +1614,7 @@ pub(crate) fn handle_semantic_tokens_range(
pub(crate) fn handle_open_docs(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentPositionParams,
-) -> Result<ExternalDocsResponse> {
+) -> anyhow::Result<ExternalDocsResponse> {
let _p = profile::span("handle_open_docs");
let position = from_proto::file_position(&snap, params)?;
@@ -1582,9 +1635,9 @@ pub(crate) fn handle_open_docs(
let Ok(remote_urls) = snap.analysis.external_docs(position, target_dir, sysroot) else {
return if snap.config.local_docs() {
Ok(ExternalDocsResponse::WithLocal(Default::default()))
- } else {
+ } else {
Ok(ExternalDocsResponse::Simple(None))
- }
+ };
};
let web = remote_urls.web_url.and_then(|it| Url::parse(&it).ok());
@@ -1600,7 +1653,7 @@ pub(crate) fn handle_open_docs(
pub(crate) fn handle_open_cargo_toml(
snap: GlobalStateSnapshot,
params: lsp_ext::OpenCargoTomlParams,
-) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = profile::span("handle_open_cargo_toml");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
@@ -1618,7 +1671,7 @@ pub(crate) fn handle_open_cargo_toml(
pub(crate) fn handle_move_item(
snap: GlobalStateSnapshot,
params: lsp_ext::MoveItemParams,
-) -> Result<Vec<lsp_ext::SnippetTextEdit>> {
+) -> anyhow::Result<Vec<lsp_ext::SnippetTextEdit>> {
let _p = profile::span("handle_move_item");
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let range = from_proto::file_range(&snap, params.text_document, params.range)?;
@@ -1637,6 +1690,34 @@ pub(crate) fn handle_move_item(
}
}
+pub(crate) fn handle_view_recursive_memory_layout(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> anyhow::Result<Option<lsp_ext::RecursiveMemoryLayout>> {
+ let _p = profile::span("view_recursive_memory_layout");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = from_proto::offset(&line_index, params.position)?;
+
+ let res = snap.analysis.get_recursive_memory_layout(FilePosition { file_id, offset })?;
+ Ok(res.map(|it| lsp_ext::RecursiveMemoryLayout {
+ nodes: it
+ .nodes
+ .iter()
+ .map(|n| lsp_ext::MemoryLayoutNode {
+ item_name: n.item_name.clone(),
+ typename: n.typename.clone(),
+ size: n.size,
+ offset: n.offset,
+ alignment: n.alignment,
+ parent_idx: n.parent_idx,
+ children_start: n.children_start,
+ children_len: n.children_len,
+ })
+ .collect(),
+ }))
+}
+
fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::CommandLink {
lsp_ext::CommandLink { tooltip: Some(tooltip), command }
}
@@ -1792,7 +1873,7 @@ fn run_rustfmt(
snap: &GlobalStateSnapshot,
text_document: TextDocumentIdentifier,
range: Option<lsp_types::Range>,
-) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
let file_id = from_proto::file_id(snap, &text_document.uri)?;
let file = snap.analysis.file_text(file_id)?;
@@ -1942,7 +2023,7 @@ fn run_rustfmt(
pub(crate) fn fetch_dependency_list(
state: GlobalStateSnapshot,
_params: FetchDependencyListParams,
-) -> Result<FetchDependencyListResult> {
+) -> anyhow::Result<FetchDependencyListResult> {
let crates = state.analysis.fetch_crates()?;
let crate_infos = crates
.into_iter()
@@ -1987,3 +2068,42 @@ fn to_url(path: VfsPath) -> Option<Url> {
let str_path = path.as_os_str().to_str()?;
Url::from_file_path(str_path).ok()
}
+
+fn resource_ops_supported(config: &Config, kind: ResourceOperationKind) -> anyhow::Result<()> {
+ #[rustfmt::skip]
+ let resops = (|| {
+ config
+ .caps()
+ .workspace
+ .as_ref()?
+ .workspace_edit
+ .as_ref()?
+ .resource_operations
+ .as_ref()
+ })();
+
+ if !matches!(resops, Some(resops) if resops.contains(&kind)) {
+ return Err(LspError::new(
+ ErrorCode::RequestFailed as i32,
+ format!(
+ "Client does not support {} capability.",
+ match kind {
+ ResourceOperationKind::Create => "create",
+ ResourceOperationKind::Rename => "rename",
+ ResourceOperationKind::Delete => "delete",
+ }
+ ),
+ )
+ .into());
+ }
+
+ Ok(())
+}
+
+fn resolve_resource_op(op: &ResourceOp) -> ResourceOperationKind {
+ match op {
+ ResourceOp::Create(_) => ResourceOperationKind::Create,
+ ResourceOp::Rename(_) => ResourceOperationKind::Rename,
+ ResourceOp::Delete(_) => ResourceOperationKind::Delete,
+ }
+}
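
A stand-alone sketch (not part of the patch) of the error-handling pattern the handlers above switch to, assuming only the `anyhow` crate: `anyhow::Result<T>` defaults the error type, `?` converts anything implementing `std::error::Error`, and `bail!`/`with_context` build ad-hoc errors. The function and limits below are invented for illustration.

    use anyhow::{bail, Context, Result};

    fn parse_index(s: &str) -> Result<usize> {
        // `parse` returns a ParseIntError; `with_context` wraps it and `?` converts it.
        let idx: usize = s.parse().with_context(|| format!("invalid index: {s:?}"))?;
        if idx > 1000 {
            bail!("index {idx} is out of range");
        }
        Ok(idx)
    }

    fn main() {
        assert_eq!(parse_index("42").unwrap(), 42);
        assert!(parse_index("not-a-number").is_err());
        assert!(parse_index("9999").is_err());
    }
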
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
index bd9f471a4..5a11012b9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -20,7 +20,7 @@ use test_utils::project_root;
use triomphe::Arc;
use vfs::{AbsPathBuf, VfsPath};
-use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
#[test]
fn integrated_highlighting_benchmark() {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
index 65de4366e..57e26c241 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
@@ -54,13 +54,12 @@ use serde::de::DeserializeOwned;
pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
-pub type Error = Box<dyn std::error::Error + Send + Sync>;
-pub type Result<T, E = Error> = std::result::Result<T, E>;
-
-pub fn from_json<T: DeserializeOwned>(what: &'static str, json: &serde_json::Value) -> Result<T> {
- let res = serde_json::from_value(json.clone())
- .map_err(|e| format!("Failed to deserialize {what}: {e}; {json}"))?;
- Ok(res)
+pub fn from_json<T: DeserializeOwned>(
+ what: &'static str,
+ json: &serde_json::Value,
+) -> anyhow::Result<T> {
+ serde_json::from_value(json.clone())
+ .map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
}
#[derive(Debug)]
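
For illustration, the reworked `from_json` above can be exercised like this; a minimal sketch assuming the `serde`, `serde_json` and `anyhow` crates, with a `Vec<u32>` standing in for a real request or config type.

    use serde::de::DeserializeOwned;

    fn from_json<T: DeserializeOwned>(what: &str, json: &serde_json::Value) -> anyhow::Result<T> {
        serde_json::from_value(json.clone())
            .map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
    }

    fn main() -> anyhow::Result<()> {
        let value = serde_json::json!([1, 2, 3]);
        // A Vec<u32> stands in for a real typed payload.
        let numbers: Vec<u32> = from_json("number list", &value)?;
        assert_eq!(numbers, vec![1, 2, 3]);
        Ok(())
    }
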
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
index 4d67c8b30..d0989b323 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
@@ -182,6 +182,33 @@ pub struct ExpandedMacro {
pub expansion: String,
}
+pub enum ViewRecursiveMemoryLayout {}
+
+impl Request for ViewRecursiveMemoryLayout {
+ type Params = lsp_types::TextDocumentPositionParams;
+ type Result = Option<RecursiveMemoryLayout>;
+ const METHOD: &'static str = "rust-analyzer/viewRecursiveMemoryLayout";
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct RecursiveMemoryLayout {
+ pub nodes: Vec<MemoryLayoutNode>,
+}
+
+#[derive(Deserialize, Serialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct MemoryLayoutNode {
+ pub item_name: String,
+ pub typename: String,
+ pub size: u64,
+ pub offset: u64,
+ pub alignment: u64,
+ pub parent_idx: i64,
+ pub children_start: i64,
+ pub children_len: u64,
+}
+
pub enum CancelFlycheck {}
impl Notification for CancelFlycheck {
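
A hypothetical example of the wire format implied by the `camelCase` rename above: a `rust-analyzer/viewRecursiveMemoryLayout` response carries field names such as `itemName` and `parentIdx`. The sketch assumes only `serde_json`; the concrete values are invented.

    fn main() {
        // Invented values; only the field-name casing mirrors the structs above.
        let response = serde_json::json!({
            "nodes": [{
                "itemName": "x",
                "typename": "Foo",
                "size": 8,
                "offset": 0,
                "alignment": 4,
                "parentIdx": -1,
                "childrenStart": 1,
                "childrenLen": 0
            }]
        });
        println!("{}", serde_json::to_string_pretty(&response).unwrap());
    }
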
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index 02dd94e5f..74036710f 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -23,10 +23,9 @@ use crate::{
lsp_ext,
lsp_utils::{notification_is, Progress},
reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress},
- Result,
};
-pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
+pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> {
tracing::info!("initial config: {:#?}", config);
// Windows scheduler implements priority boosts: if thread waits for an
@@ -109,7 +108,7 @@ impl fmt::Debug for Event {
}
impl GlobalState {
- fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
+ fn run(mut self, inbox: Receiver<lsp_server::Message>) -> anyhow::Result<()> {
self.update_status_or_notify();
if self.config.did_save_text_document_dynamic_registration() {
@@ -134,7 +133,7 @@ impl GlobalState {
self.handle_event(event)?;
}
- Err("client exited without proper shutdown sequence".into())
+ anyhow::bail!("client exited without proper shutdown sequence")
}
fn register_did_save_capability(&mut self) {
@@ -191,7 +190,7 @@ impl GlobalState {
}
}
- fn handle_event(&mut self, event: Event) -> Result<()> {
+ fn handle_event(&mut self, event: Event) -> anyhow::Result<()> {
let loop_start = Instant::now();
// NOTE: don't count blocking select! call as a loop-turn time
let _p = profile::span("GlobalState::handle_event");
@@ -754,11 +753,12 @@ impl GlobalState {
)
.on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
.on::<lsp_ext::Ssr>(handlers::handle_ssr)
+ .on::<lsp_ext::ViewRecursiveMemoryLayout>(handlers::handle_view_recursive_memory_layout)
.finish();
}
/// Handles an incoming notification.
- fn on_notification(&mut self, not: Notification) -> Result<()> {
+ fn on_notification(&mut self, not: Notification) -> anyhow::Result<()> {
use crate::handlers::notification as handlers;
use lsp_types::notification as notifs;
@@ -843,11 +843,7 @@ impl GlobalState {
d.code.as_str().to_string(),
)),
code_description: Some(lsp_types::CodeDescription {
- href: lsp_types::Url::parse(&format!(
- "https://rust-analyzer.github.io/manual.html#{}",
- d.code.as_str()
- ))
- .unwrap(),
+ href: lsp_types::Url::parse(&d.code.url()).unwrap(),
}),
source: Some("rust-analyzer".to_string()),
message: d.message,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index 310c6b076..0a2bb8224 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -12,26 +12,22 @@
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
-use std::{collections::hash_map::Entry, iter, mem, sync};
+use std::{iter, mem};
use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::{
- base_db::{
- salsa::Durability, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
- ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros, SourceRoot, VfsPath,
- },
+ base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros},
FxHashMap,
};
-use itertools::Itertools;
-use proc_macro_api::{MacroDylib, ProcMacroServer};
-use project_model::{PackageRoot, ProjectWorkspace, WorkspaceBuildScripts};
+use load_cargo::{load_proc_macro, ProjectFolders};
+use proc_macro_api::ProcMacroServer;
+use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use rustc_hash::FxHashSet;
use stdx::{format_to, thread::ThreadIntent};
-use syntax::SmolStr;
use triomphe::Arc;
-use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
+use vfs::{AbsPath, ChangeKind};
use crate::{
config::{Config, FilesWatcher, LinkedProject},
@@ -41,8 +37,6 @@ use crate::{
op_queue::Cause,
};
-use ::tt::token_id as tt;
-
#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
Begin,
@@ -120,6 +114,11 @@ impl GlobalState {
if self.proc_macro_clients.iter().any(|it| it.is_err()) {
status.health = lsp_ext::Health::Warning;
message.push_str("Failed to spawn one or more proc-macro servers.\n\n");
+ for err in self.proc_macro_clients.iter() {
+ if let Err(err) = err {
+ format_to!(message, "- {err}\n");
+ }
+ }
}
if !self.config.cargo_autoreload()
&& self.is_quiescent()
@@ -307,7 +306,7 @@ impl GlobalState {
res.map_or_else(
|_| Err("proc macro crate is missing dylib".to_owned()),
|(crate_name, path)| {
- progress(path.display().to_string());
+ progress(path.to_string());
client.as_ref().map_err(Clone::clone).and_then(|client| {
load_proc_macro(
client,
@@ -340,7 +339,11 @@ impl GlobalState {
let _p = profile::span("GlobalState::switch_workspaces");
tracing::info!(%cause, "will switch workspaces");
- let Some((workspaces, force_reload_crate_graph)) = self.fetch_workspaces_queue.last_op_result() else { return; };
+ let Some((workspaces, force_reload_crate_graph)) =
+ self.fetch_workspaces_queue.last_op_result()
+ else {
+ return;
+ };
if let Err(_) = self.fetch_workspace_error() {
if !self.workspaces.is_empty() {
@@ -407,9 +410,9 @@ impl GlobalState {
.flat_map(|root| {
root.include.into_iter().flat_map(|it| {
[
- format!("{}/**/*.rs", it.display()),
- format!("{}/**/Cargo.toml", it.display()),
- format!("{}/**/Cargo.lock", it.display()),
+ format!("{it}/**/*.rs"),
+ format!("{it}/**/Cargo.toml"),
+ format!("{it}/**/Cargo.lock"),
]
})
})
@@ -447,17 +450,13 @@ impl GlobalState {
None => ws.find_sysroot_proc_macro_srv()?,
};
- tracing::info!("Using proc-macro server at {}", path.display(),);
+ tracing::info!("Using proc-macro server at {path}");
ProcMacroServer::spawn(path.clone()).map_err(|err| {
tracing::error!(
- "Failed to run proc-macro server from path {}, error: {:?}",
- path.display(),
- err
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
);
- anyhow::anyhow!(
- "Failed to run proc-macro server from path {}, error: {:?}",
- path.display(),
- err
+ anyhow::format_err!(
+ "Failed to run proc-macro server from path {path}, error: {err:?}",
)
})
})
@@ -534,7 +533,9 @@ impl GlobalState {
pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
let mut buf = String::new();
- let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
+ let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else {
+ return Ok(());
+ };
if last_op_result.is_empty() {
stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
} else {
@@ -621,254 +622,6 @@ impl GlobalState {
}
}
-#[derive(Default)]
-pub(crate) struct ProjectFolders {
- pub(crate) load: Vec<vfs::loader::Entry>,
- pub(crate) watch: Vec<usize>,
- pub(crate) source_root_config: SourceRootConfig,
-}
-
-impl ProjectFolders {
- pub(crate) fn new(
- workspaces: &[ProjectWorkspace],
- global_excludes: &[AbsPathBuf],
- ) -> ProjectFolders {
- let mut res = ProjectFolders::default();
- let mut fsc = FileSetConfig::builder();
- let mut local_filesets = vec![];
-
- // Dedup source roots
- // Depending on the project setup, we can have duplicated source roots, or for example in
- // the case of the rustc workspace, we can end up with two source roots that are almost the
- // same but not quite, like:
- // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] }
- // PackageRoot {
- // is_local: true,
- // include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")],
- // exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")]
- // }
- //
- // The first one comes from the explicit rustc workspace which points to the rustc workspace itself
- // The second comes from the rustc workspace that we load as the actual project workspace
- // These `is_local` differing in this kind of way gives us problems, especially when trying to filter diagnostics as we don't report diagnostics for external libraries.
- // So we need to deduplicate these, usually it would be enough to deduplicate by `include`, but as the rustc example shows here that doesn't work,
- // so we need to also coalesce the includes if they overlap.
-
- let mut roots: Vec<_> = workspaces
- .iter()
- .flat_map(|ws| ws.to_roots())
- .update(|root| root.include.sort())
- .sorted_by(|a, b| a.include.cmp(&b.include))
- .collect();
-
- // map that tracks indices of overlapping roots
- let mut overlap_map = FxHashMap::<_, Vec<_>>::default();
- let mut done = false;
-
- while !mem::replace(&mut done, true) {
- // maps include paths to indices of the corresponding root
- let mut include_to_idx = FxHashMap::default();
- // Find and note down the indices of overlapping roots
- for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) {
- for include in &root.include {
- match include_to_idx.entry(include) {
- Entry::Occupied(e) => {
- overlap_map.entry(*e.get()).or_default().push(idx);
- }
- Entry::Vacant(e) => {
- e.insert(idx);
- }
- }
- }
- }
- for (k, v) in overlap_map.drain() {
- done = false;
- for v in v {
- let r = mem::replace(
- &mut roots[v],
- PackageRoot { is_local: false, include: vec![], exclude: vec![] },
- );
- roots[k].is_local |= r.is_local;
- roots[k].include.extend(r.include);
- roots[k].exclude.extend(r.exclude);
- }
- roots[k].include.sort();
- roots[k].exclude.sort();
- roots[k].include.dedup();
- roots[k].exclude.dedup();
- }
- }
-
- for root in roots.into_iter().filter(|it| !it.include.is_empty()) {
- let file_set_roots: Vec<VfsPath> =
- root.include.iter().cloned().map(VfsPath::from).collect();
-
- let entry = {
- let mut dirs = vfs::loader::Directories::default();
- dirs.extensions.push("rs".into());
- dirs.include.extend(root.include);
- dirs.exclude.extend(root.exclude);
- for excl in global_excludes {
- if dirs
- .include
- .iter()
- .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
- {
- dirs.exclude.push(excl.clone());
- }
- }
-
- vfs::loader::Entry::Directories(dirs)
- };
-
- if root.is_local {
- res.watch.push(res.load.len());
- }
- res.load.push(entry);
-
- if root.is_local {
- local_filesets.push(fsc.len());
- }
- fsc.add_file_set(file_set_roots)
- }
-
- let fsc = fsc.build();
- res.source_root_config = SourceRootConfig { fsc, local_filesets };
-
- res
- }
-}
-
-#[derive(Default, Debug)]
-pub(crate) struct SourceRootConfig {
- pub(crate) fsc: FileSetConfig,
- pub(crate) local_filesets: Vec<usize>,
-}
-
-impl SourceRootConfig {
- pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
- let _p = profile::span("SourceRootConfig::partition");
- self.fsc
- .partition(vfs)
- .into_iter()
- .enumerate()
- .map(|(idx, file_set)| {
- let is_local = self.local_filesets.contains(&idx);
- if is_local {
- SourceRoot::new_local(file_set)
- } else {
- SourceRoot::new_library(file_set)
- }
- })
- .collect()
- }
-}
-
-/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
-/// with an identity dummy expander.
-pub(crate) fn load_proc_macro(
- server: &ProcMacroServer,
- path: &AbsPath,
- dummy_replace: &[Box<str>],
-) -> ProcMacroLoadResult {
- let res: Result<Vec<_>, String> = (|| {
- let dylib = MacroDylib::new(path.to_path_buf());
- let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
- if vec.is_empty() {
- return Err("proc macro library returned no proc macros".to_string());
- }
- Ok(vec
- .into_iter()
- .map(|expander| expander_to_proc_macro(expander, dummy_replace))
- .collect())
- })();
- return match res {
- Ok(proc_macros) => {
- tracing::info!(
- "Loaded proc-macros for {}: {:?}",
- path.display(),
- proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
- );
- Ok(proc_macros)
- }
- Err(e) => {
- tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
- Err(e)
- }
- };
-
- fn expander_to_proc_macro(
- expander: proc_macro_api::ProcMacro,
- dummy_replace: &[Box<str>],
- ) -> ProcMacro {
- let name = SmolStr::from(expander.name());
- let kind = match expander.kind() {
- proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
- proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
- proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
- };
- let expander: sync::Arc<dyn ProcMacroExpander> =
- if dummy_replace.iter().any(|replace| &**replace == name) {
- match kind {
- ProcMacroKind::Attr => sync::Arc::new(IdentityExpander),
- _ => sync::Arc::new(EmptyExpander),
- }
- } else {
- sync::Arc::new(Expander(expander))
- };
- ProcMacro { name, kind, expander }
- }
-
- #[derive(Debug)]
- struct Expander(proc_macro_api::ProcMacro);
-
- impl ProcMacroExpander for Expander {
- fn expand(
- &self,
- subtree: &tt::Subtree,
- attrs: Option<&tt::Subtree>,
- env: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
- let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
- match self.0.expand(subtree, attrs, env) {
- Ok(Ok(subtree)) => Ok(subtree),
- Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
- Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
- }
- }
- }
-
- /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
- #[derive(Debug)]
- struct IdentityExpander;
-
- impl ProcMacroExpander for IdentityExpander {
- fn expand(
- &self,
- subtree: &tt::Subtree,
- _: Option<&tt::Subtree>,
- _: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
- Ok(subtree.clone())
- }
- }
-
- /// Empty expander, used for proc-macros that are deliberately ignored by the user.
- #[derive(Debug)]
- struct EmptyExpander;
-
- impl ProcMacroExpander for EmptyExpander {
- fn expand(
- &self,
- _: &tt::Subtree,
- _: Option<&tt::Subtree>,
- _: &Env,
- ) -> Result<tt::Subtree, ProcMacroExpansionError> {
- Ok(tt::Subtree::empty())
- }
- }
-}
-
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
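
Several hunks above only reformat `let ... else` bindings onto multiple lines. As a reminder of the construct itself, here is a minimal stand-alone example (plain std Rust, unrelated to the patch): the `else` block must diverge.

    fn first_char(s: &str) -> char {
        let Some(c) = s.chars().next() else {
            return '?';
        };
        c
    }

    fn main() {
        assert_eq!(first_char("abc"), 'a');
        assert_eq!(first_char(""), '?');
    }
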
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
index d4bb20c8f..1fe02fc7e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
@@ -78,6 +78,7 @@ define_semantic_token_types![
(DERIVE_HELPER, "deriveHelper") => DECORATOR,
(DOT, "dot"),
(ESCAPE_SEQUENCE, "escapeSequence") => STRING,
+ (INVALID_ESCAPE_SEQUENCE, "invalidEscapeSequence") => STRING,
(FORMAT_SPECIFIER, "formatSpecifier") => STRING,
(GENERIC, "generic") => TYPE_PARAMETER,
(LABEL, "label"),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
index 648bc995a..7b32180e3 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
@@ -10,8 +10,8 @@ use ide::{
CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
InlayHintLabel, InlayHintLabelPart, InlayKind, Markup, NavigationTarget, ReferenceCategory,
- RenameError, Runnable, Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind,
- TextEdit, TextRange, TextSize,
+ RenameError, Runnable, Severity, SignatureHelp, SnippetEdit, SourceChange, StructureNodeKind,
+ SymbolKind, TextEdit, TextRange, TextSize,
};
use itertools::Itertools;
use serde_json::to_value;
@@ -22,7 +22,7 @@ use crate::{
config::{CallInfoConfig, Config},
global_state::GlobalStateSnapshot,
line_index::{LineEndings, LineIndex, PositionEncoding},
- lsp_ext,
+ lsp_ext::{self, SnippetTextEdit},
lsp_utils::invalid_params_error,
semantic_tokens::{self, standard_fallback_type},
};
@@ -94,7 +94,10 @@ pub(crate) fn document_highlight_kind(
pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
match severity {
Severity::Error => lsp_types::DiagnosticSeverity::ERROR,
+ Severity::Warning => lsp_types::DiagnosticSeverity::WARNING,
Severity::WeakWarning => lsp_types::DiagnosticSeverity::HINT,
+ // unreachable
+ Severity::Allow => lsp_types::DiagnosticSeverity::INFORMATION,
}
}
@@ -637,6 +640,7 @@ fn semantic_token_type_and_modifiers(
HlTag::CharLiteral => semantic_tokens::CHAR,
HlTag::Comment => semantic_tokens::COMMENT,
HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
+ HlTag::InvalidEscapeSequence => semantic_tokens::INVALID_ESCAPE_SEQUENCE,
HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
HlTag::Keyword => semantic_tokens::KEYWORD,
HlTag::None => semantic_tokens::GENERIC,
@@ -881,16 +885,136 @@ fn outside_workspace_annotation_id() -> String {
String::from("OutsideWorkspace")
}
+fn merge_text_and_snippet_edits(
+ line_index: &LineIndex,
+ edit: TextEdit,
+ snippet_edit: SnippetEdit,
+) -> Vec<SnippetTextEdit> {
+ let mut edits: Vec<SnippetTextEdit> = vec![];
+ let mut snippets = snippet_edit.into_edit_ranges().into_iter().peekable();
+ let mut text_edits = edit.into_iter();
+
+ while let Some(current_indel) = text_edits.next() {
+ let new_range = {
+ let insert_len =
+ TextSize::try_from(current_indel.insert.len()).unwrap_or(TextSize::from(u32::MAX));
+ TextRange::at(current_indel.delete.start(), insert_len)
+ };
+
+ // insert any snippets before the text edit
+ for (snippet_index, snippet_range) in
+ snippets.take_while_ref(|(_, range)| range.end() < new_range.start())
+ {
+ let snippet_range = if !stdx::always!(
+ snippet_range.is_empty(),
+ "placeholder range {:?} is before current text edit range {:?}",
+ snippet_range,
+ new_range
+ ) {
+ // only possible for tabstops, so make sure it's an empty/insert range
+ TextRange::empty(snippet_range.start())
+ } else {
+ snippet_range
+ };
+
+ let range = range(&line_index, snippet_range);
+ let new_text = format!("${snippet_index}");
+
+ edits.push(SnippetTextEdit {
+ range,
+ new_text,
+ insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET),
+ annotation_id: None,
+ })
+ }
+
+ if snippets.peek().is_some_and(|(_, range)| new_range.intersect(*range).is_some()) {
+ // at least one snippet edit intersects this text edit,
+ // so gather all of the edits that intersect this text edit
+ let mut all_snippets = snippets
+ .take_while_ref(|(_, range)| new_range.intersect(*range).is_some())
+ .collect_vec();
+
+ // ensure all of the ranges are wholly contained inside of the new range
+ all_snippets.retain(|(_, range)| {
+ stdx::always!(
+ new_range.contains_range(*range),
+ "found placeholder range {:?} which wasn't fully inside of text edit's new range {:?}", range, new_range
+ )
+ });
+
+ let mut text_edit = text_edit(line_index, current_indel);
+
+ // escape out snippet text
+ stdx::replace(&mut text_edit.new_text, '\\', r"\\");
+ stdx::replace(&mut text_edit.new_text, '$', r"\$");
+
+ // ...and apply!
+ for (index, range) in all_snippets.iter().rev() {
+ let start = (range.start() - new_range.start()).into();
+ let end = (range.end() - new_range.start()).into();
+
+ if range.is_empty() {
+ text_edit.new_text.insert_str(start, &format!("${index}"));
+ } else {
+ text_edit.new_text.insert(end, '}');
+ text_edit.new_text.insert_str(start, &format!("${{{index}:"));
+ }
+ }
+
+ edits.push(SnippetTextEdit {
+ range: text_edit.range,
+ new_text: text_edit.new_text,
+ insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET),
+ annotation_id: None,
+ })
+ } else {
+ // snippet edit was beyond the current one
+ // since it wasn't consumed, it's available for the next pass
+ edits.push(snippet_text_edit(line_index, false, current_indel));
+ }
+ }
+
+ // insert any remaining tabstops
+ edits.extend(snippets.map(|(snippet_index, snippet_range)| {
+ let snippet_range = if !stdx::always!(
+ snippet_range.is_empty(),
+ "found placeholder snippet {:?} without a text edit",
+ snippet_range
+ ) {
+ TextRange::empty(snippet_range.start())
+ } else {
+ snippet_range
+ };
+
+ let range = range(&line_index, snippet_range);
+ let new_text = format!("${snippet_index}");
+
+ SnippetTextEdit {
+ range,
+ new_text,
+ insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET),
+ annotation_id: None,
+ }
+ }));
+
+ edits
+}
+
pub(crate) fn snippet_text_document_edit(
snap: &GlobalStateSnapshot,
is_snippet: bool,
file_id: FileId,
edit: TextEdit,
+ snippet_edit: Option<SnippetEdit>,
) -> Cancellable<lsp_ext::SnippetTextDocumentEdit> {
let text_document = optional_versioned_text_document_identifier(snap, file_id);
let line_index = snap.file_line_index(file_id)?;
- let mut edits: Vec<_> =
- edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect();
+ let mut edits = if let Some(snippet_edit) = snippet_edit {
+ merge_text_and_snippet_edits(&line_index, edit, snippet_edit)
+ } else {
+ edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect()
+ };
if snap.analysis.is_library_file(file_id)? && snap.config.change_annotation_support() {
for edit in &mut edits {
@@ -970,8 +1094,14 @@ pub(crate) fn snippet_workspace_edit(
let ops = snippet_text_document_ops(snap, op)?;
document_changes.extend_from_slice(&ops);
}
- for (file_id, edit) in source_change.source_file_edits {
- let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
+ for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
+ let edit = snippet_text_document_edit(
+ snap,
+ source_change.is_snippet,
+ file_id,
+ edit,
+ snippet_edit,
+ )?;
document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
}
let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
@@ -1410,7 +1540,9 @@ pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
#[cfg(test)]
mod tests {
+ use expect_test::{expect, Expect};
use ide::{Analysis, FilePosition};
+ use ide_db::source_change::Snippet;
use test_utils::extract_offset;
use triomphe::Arc;
@@ -1480,6 +1612,481 @@ fn bar(_: usize) {}
assert!(!docs.contains("use crate::bar"));
}
+ fn check_rendered_snippets(edit: TextEdit, snippets: SnippetEdit, expect: Expect) {
+ let text = r#"/* place to put all ranges in */"#;
+ let line_index = LineIndex {
+ index: Arc::new(ide::LineIndex::new(text)),
+ endings: LineEndings::Unix,
+ encoding: PositionEncoding::Utf8,
+ };
+
+ let res = merge_text_and_snippet_edits(&line_index, edit, snippets);
+ expect.assert_debug_eq(&res);
+ }
+
+ #[test]
+ fn snippet_rendering_only_tabstops() {
+ let edit = TextEdit::builder().finish();
+ let snippets = SnippetEdit::new(vec![
+ Snippet::Tabstop(0.into()),
+ Snippet::Tabstop(0.into()),
+ Snippet::Tabstop(1.into()),
+ Snippet::Tabstop(1.into()),
+ ]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$1",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$2",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 1,
+ },
+ end: Position {
+ line: 0,
+ character: 1,
+ },
+ },
+ new_text: "$3",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 1,
+ },
+ end: Position {
+ line: 0,
+ character: 1,
+ },
+ },
+ new_text: "$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_only_text_edits() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abc".to_owned());
+ edit.insert(3.into(), "def".to_owned());
+ let edit = edit.finish();
+ let snippets = SnippetEdit::new(vec![]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 3,
+ },
+ end: Position {
+ line: 0,
+ character: 3,
+ },
+ },
+ new_text: "def",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_tabstop_after_text_edit() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abc".to_owned());
+ let edit = edit.finish();
+ let snippets = SnippetEdit::new(vec![Snippet::Tabstop(7.into())]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 7,
+ },
+ end: Position {
+ line: 0,
+ character: 7,
+ },
+ },
+ new_text: "$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_tabstops_before_text_edit() {
+ let mut edit = TextEdit::builder();
+ edit.insert(2.into(), "abc".to_owned());
+ let edit = edit.finish();
+ let snippets =
+ SnippetEdit::new(vec![Snippet::Tabstop(0.into()), Snippet::Tabstop(0.into())]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$1",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 2,
+ },
+ end: Position {
+ line: 0,
+ character: 2,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_tabstops_between_text_edits() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abc".to_owned());
+ edit.insert(7.into(), "abc".to_owned());
+ let edit = edit.finish();
+ let snippets =
+ SnippetEdit::new(vec![Snippet::Tabstop(4.into()), Snippet::Tabstop(4.into())]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 4,
+ },
+ end: Position {
+ line: 0,
+ character: 4,
+ },
+ },
+ new_text: "$1",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 4,
+ },
+ end: Position {
+ line: 0,
+ character: 4,
+ },
+ },
+ new_text: "$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 7,
+ },
+ end: Position {
+ line: 0,
+ character: 7,
+ },
+ },
+ new_text: "abc",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_multiple_tabstops_in_text_edit() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abcdefghijkl".to_owned());
+ let edit = edit.finish();
+ let snippets = SnippetEdit::new(vec![
+ Snippet::Tabstop(0.into()),
+ Snippet::Tabstop(5.into()),
+ Snippet::Tabstop(12.into()),
+ ]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "$1abcde$2fghijkl$0",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_multiple_placeholders_in_text_edit() {
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), "abcdefghijkl".to_owned());
+ let edit = edit.finish();
+ let snippets = SnippetEdit::new(vec![
+ Snippet::Placeholder(TextRange::new(0.into(), 3.into())),
+ Snippet::Placeholder(TextRange::new(5.into(), 7.into())),
+ Snippet::Placeholder(TextRange::new(10.into(), 12.into())),
+ ]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "${1:abc}de${2:fg}hij${0:kl}",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn snippet_rendering_escape_snippet_bits() {
+ // only needed for snippet formats
+ let mut edit = TextEdit::builder();
+ edit.insert(0.into(), r"abc\def$".to_owned());
+ edit.insert(8.into(), r"ghi\jkl$".to_owned());
+ let edit = edit.finish();
+ let snippets =
+ SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(0.into(), 3.into()))]);
+
+ check_rendered_snippets(
+ edit,
+ snippets,
+ expect![[r#"
+ [
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 0,
+ },
+ end: Position {
+ line: 0,
+ character: 0,
+ },
+ },
+ new_text: "${0:abc}\\\\def\\$",
+ insert_text_format: Some(
+ Snippet,
+ ),
+ annotation_id: None,
+ },
+ SnippetTextEdit {
+ range: Range {
+ start: Position {
+ line: 0,
+ character: 8,
+ },
+ end: Position {
+ line: 0,
+ character: 8,
+ },
+ },
+ new_text: "ghi\\jkl$",
+ insert_text_format: None,
+ annotation_id: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
// `Url` is not able to parse windows paths on unix machines.
#[test]
#[cfg(target_os = "windows")]
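
A toy, std-only sketch of the idea exercised by the new `merge_text_and_snippet_edits` tests above: escape snippet metacharacters in a text edit's replacement text, then splice `${n:...}` placeholder markers in from right to left so earlier offsets stay valid. The helper below is invented for illustration and sidesteps line/column conversion entirely.

    use std::ops::Range;

    fn render_snippet(new_text: &str, placeholders: &[(usize, Range<usize>)]) -> String {
        // Escape snippet metacharacters first (this sketch assumes the placeholder
        // ranges point into text containing neither '\\' nor '$', so offsets stay valid).
        let mut out = new_text.replace('\\', r"\\").replace('$', r"\$");
        let mut sorted = placeholders.to_vec();
        sorted.sort_by_key(|(_, r)| r.start);
        // Splice markers from right to left so earlier offsets are unaffected.
        for (index, range) in sorted.iter().rev() {
            out.insert(range.end, '}');
            out.insert_str(range.start, &format!("${{{index}:"));
        }
        out
    }

    fn main() {
        // "abcdefghijkl" with placeholders over "abc", "fg" and "kl",
        // numbered 1, 2 and 0 as in the test expectations above.
        let rendered = render_snippet("abcdefghijkl", &[(1, 0..3), (2, 5..7), (0, 10..12)]);
        assert_eq!(rendered, "${1:abc}de${2:fg}hij${0:kl}");
    }
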
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
index b2a8041ae..3c52ef5ef 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -13,6 +13,7 @@ use rust_analyzer::{config::Config, lsp_ext, main_loop};
use serde::Serialize;
use serde_json::{json, to_string_pretty, Value};
use test_utils::FixtureWithProjectMeta;
+use tracing_subscriber::{prelude::*, Layer};
use vfs::AbsPathBuf;
use crate::testdir::TestDir;
@@ -24,7 +25,7 @@ pub(crate) struct Project<'a> {
config: serde_json::Value,
}
-impl<'a> Project<'a> {
+impl Project<'_> {
pub(crate) fn with_fixture(fixture: &str) -> Project<'_> {
Project {
fixture,
@@ -47,17 +48,17 @@ impl<'a> Project<'a> {
}
}
- pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Project<'a> {
+ pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Self {
self.tmp_dir = Some(tmp_dir);
self
}
- pub(crate) fn root(mut self, path: &str) -> Project<'a> {
+ pub(crate) fn root(mut self, path: &str) -> Self {
self.roots.push(path.into());
self
}
- pub(crate) fn with_config(mut self, config: serde_json::Value) -> Project<'a> {
+ pub(crate) fn with_config(mut self, config: serde_json::Value) -> Self {
fn merge(dst: &mut serde_json::Value, src: serde_json::Value) {
match (dst, src) {
(Value::Object(dst), Value::Object(src)) => {
@@ -76,10 +77,11 @@ impl<'a> Project<'a> {
let tmp_dir = self.tmp_dir.unwrap_or_else(TestDir::new);
static INIT: Once = Once::new();
INIT.call_once(|| {
- tracing_subscriber::fmt()
- .with_test_writer()
- .with_env_filter(tracing_subscriber::EnvFilter::from_env("RA_LOG"))
- .init();
+ let filter: tracing_subscriber::filter::Targets =
+ std::env::var("RA_LOG").ok().and_then(|it| it.parse().ok()).unwrap_or_default();
+ let layer =
+ tracing_subscriber::fmt::Layer::new().with_test_writer().with_filter(filter);
+ tracing_subscriber::Registry::default().with(layer).init();
profile::init_from(crate::PROFILE);
});
@@ -111,6 +113,14 @@ impl<'a> Project<'a> {
relative_pattern_support: None,
},
),
+ workspace_edit: Some(lsp_types::WorkspaceEditClientCapabilities {
+ resource_operations: Some(vec![
+ lsp_types::ResourceOperationKind::Create,
+ lsp_types::ResourceOperationKind::Delete,
+ lsp_types::ResourceOperationKind::Rename,
+ ]),
+ ..Default::default()
+ }),
..Default::default()
}),
text_document: Some(lsp_types::TextDocumentClientCapabilities {
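
Restating the layered test-logging setup above as a stand-alone function, under the assumption that the `tracing` and `tracing-subscriber` crates are available with their default (`fmt`, `registry`) features; the function name is made up.

    use tracing_subscriber::{prelude::*, Layer};

    fn init_test_logging() {
        let filter: tracing_subscriber::filter::Targets =
            std::env::var("RA_LOG").ok().and_then(|it| it.parse().ok()).unwrap_or_default();
        let layer =
            tracing_subscriber::fmt::Layer::new().with_test_writer().with_filter(filter);
        tracing_subscriber::registry().with(layer).init();
    }

    fn main() {
        init_test_logging();
        tracing::info!("logging initialized");
    }
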
diff --git a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
index c5da6ceb4..1514c6c7d 100644
--- a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs
@@ -61,7 +61,9 @@ impl CommentBlock {
let mut blocks = CommentBlock::extract_untagged(text);
blocks.retain_mut(|block| {
let first = block.contents.remove(0);
- let Some(id) = first.strip_prefix(&tag) else { return false; };
+ let Some(id) = first.strip_prefix(&tag) else {
+ return false;
+ };
if block.is_doc {
panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}");
diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
index a67f36ae9..536f000a4 100644
--- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
@@ -13,7 +13,7 @@ doctest = false
[dependencies]
libc = "0.2.135"
-backtrace = { version = "0.3.65", optional = true }
+backtrace = { version = "0.3.67", optional = true }
always-assert = { version = "0.1.2", features = ["log"] }
jod-thread = "0.1.2"
crossbeam-channel = "0.5.5"
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
index fb38d25ab..5ee0c4792 100644
--- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -19,7 +19,7 @@ itertools = "0.10.5"
rowan = "0.15.11"
rustc-hash = "1.1.0"
once_cell = "1.17.0"
-indexmap = "1.9.1"
+indexmap = "2.0.0"
smol_str.workspace = true
triomphe.workspace = true
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index b096c9974..138ddd208 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -72,6 +72,12 @@ TokenTree =
MacroItems =
Item*
+MacroEagerInput =
+ '(' (Expr (',' Expr)* ','?)? ')'
+| '{' (Expr (',' Expr)* ','?)? '}'
+| '[' (Expr (',' Expr)* ','?)? ']'
+
+
MacroStmts =
statements:Stmt*
Expr?
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
index b3ea6ca8d..a150d9e6c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -213,6 +213,28 @@ pub trait AttrsOwnerEdit: ast::HasAttrs {
}
}
}
+
+ fn add_attr(&self, attr: ast::Attr) {
+ add_attr(self.syntax(), attr);
+
+ fn add_attr(node: &SyntaxNode, attr: ast::Attr) {
+ let indent = IndentLevel::from_node(node);
+ attr.reindent_to(indent);
+
+ let after_attrs_and_comments = node
+ .children_with_tokens()
+ .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
+ .map_or(Position::first_child_of(node), |it| Position::before(it));
+
+ ted::insert_all(
+ after_attrs_and_comments,
+ vec![
+ attr.syntax().clone().into(),
+ make::tokens::whitespace(&format!("\n{indent}")).into(),
+ ],
+ )
+ }
+ }
}
impl<T: ast::HasAttrs> AttrsOwnerEdit for T {}
@@ -358,6 +380,26 @@ impl Removable for ast::UseTree {
}
impl ast::UseTree {
+ /// Deletes the usetree node represented by the input. Recursively removes parents, including use nodes that become empty.
+ pub fn remove_recursive(self) {
+ let parent = self.syntax().parent();
+
+ self.remove();
+
+ if let Some(u) = parent.clone().and_then(ast::Use::cast) {
+ if u.use_tree().is_none() {
+ u.remove();
+ }
+ } else if let Some(u) = parent.and_then(ast::UseTreeList::cast) {
+ if u.use_trees().next().is_none() {
+ let parent = u.syntax().parent().and_then(ast::UseTree::cast);
+ if let Some(u) = parent {
+ u.remove_recursive();
+ }
+ }
+ }
+ }
+
pub fn get_or_create_use_tree_list(&self) -> ast::UseTreeList {
match self.use_tree_list() {
Some(it) => it,
@@ -465,6 +507,22 @@ impl Removable for ast::Use {
}
}
}
+ let prev_ws = self
+ .syntax()
+ .prev_sibling_or_token()
+ .and_then(|it| it.into_token())
+ .and_then(ast::Whitespace::cast);
+ if let Some(prev_ws) = prev_ws {
+ let ws_text = prev_ws.syntax().text();
+ let prev_newline = ws_text.rfind('\n').map(|x| x + 1).unwrap_or(0);
+ let rest = &ws_text[0..prev_newline];
+ if rest.is_empty() {
+ ted::remove(prev_ws.syntax());
+ } else {
+ ted::replace(prev_ws.syntax(), make::tokens::whitespace(rest));
+ }
+ }
+
ted::remove(self.syntax());
}
}
@@ -676,12 +734,6 @@ fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
}
}
-impl ast::StmtList {
- pub fn push_front(&self, statement: ast::Stmt) {
- ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax());
- }
-}
-
impl ast::VariantList {
pub fn add_variant(&self, variant: ast::Variant) {
let (indent, position) = match self.variants().last() {
@@ -732,6 +784,27 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
Some(())
}
+pub trait HasVisibilityEdit: ast::HasVisibility {
+ fn set_visibility(&self, visbility: ast::Visibility) {
+ match self.visibility() {
+ Some(current_visibility) => {
+ ted::replace(current_visibility.syntax(), visbility.syntax())
+ }
+ None => {
+ let vis_before = self
+ .syntax()
+ .children_with_tokens()
+ .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
+ .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap());
+
+ ted::insert(ted::Position::before(vis_before), visbility.syntax());
+ }
+ }
+ }
+}
+
+impl<T: ast::HasVisibility> HasVisibilityEdit for T {}
+
pub trait Indent: AstNode + Clone + Sized {
fn indent_level(&self) -> IndentLevel {
IndentLevel::from_node(self.syntax())
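
The whitespace handling added to `ast::Use::remove` above boils down to the following std-only rule (a sketch, not the real API): keep the part of the preceding whitespace token up to and including its last newline, and drop the trailing indentation.

    fn trim_preceding_ws(ws: &str) -> &str {
        let prev_newline = ws.rfind('\n').map(|x| x + 1).unwrap_or(0);
        &ws[..prev_newline]
    }

    fn main() {
        assert_eq!(trim_preceding_ws("\n\n    "), "\n\n"); // keep blank lines, drop indentation
        assert_eq!(trim_preceding_ws("    "), "");         // pure indentation: remove the token
    }
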
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index e520801ea..0b27faa53 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -198,6 +198,20 @@ impl ast::HasModuleItem for MacroItems {}
impl MacroItems {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroEagerInput {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroEagerInput {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroStmts {
pub(crate) syntax: SyntaxNode,
}
@@ -1922,6 +1936,17 @@ impl AstNode for MacroItems {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
+impl AstNode for MacroEagerInput {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EAGER_INPUT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
impl AstNode for MacroStmts {
fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS }
fn cast(syntax: SyntaxNode) -> Option<Self> {
@@ -4360,6 +4385,11 @@ impl std::fmt::Display for MacroItems {
std::fmt::Display::fmt(self.syntax(), f)
}
}
+impl std::fmt::Display for MacroEagerInput {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
impl std::fmt::Display for MacroStmts {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
index 3c2b7e56b..4c6db0ef0 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -10,6 +10,8 @@
//! `parse(format!())` we use internally is an implementation detail -- long
//! term, it will be replaced with direct tree manipulation.
use itertools::Itertools;
+use parser::T;
+use rowan::NodeOrToken;
use stdx::{format_to, never};
use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken};
@@ -447,6 +449,21 @@ pub fn block_expr(
ast_from_text(&format!("fn f() {buf}"))
}
+pub fn async_move_block_expr(
+ stmts: impl IntoIterator<Item = ast::Stmt>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "async move {\n".to_string();
+ for stmt in stmts.into_iter() {
+ format_to!(buf, " {stmt}\n");
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {tail_expr}\n");
+ }
+ buf += "}";
+ ast_from_text(&format!("const _: () = {buf};"))
+}
+
pub fn tail_only_block_expr(tail_expr: ast::Expr) -> ast::BlockExpr {
ast_from_text(&format!("fn f() {{ {tail_expr} }}"))
}
@@ -848,6 +865,36 @@ pub fn param_list(
ast_from_text(&list)
}
+pub fn trait_(
+ is_unsafe: bool,
+ ident: &str,
+ gen_params: Option<ast::GenericParamList>,
+ where_clause: Option<ast::WhereClause>,
+ assoc_items: ast::AssocItemList,
+) -> ast::Trait {
+ let mut text = String::new();
+
+ if is_unsafe {
+ format_to!(text, "unsafe ");
+ }
+
+ format_to!(text, "trait {ident}");
+
+ if let Some(gen_params) = gen_params {
+ format_to!(text, "{} ", gen_params.to_string());
+ } else {
+ text.push(' ');
+ }
+
+ if let Some(where_clause) = where_clause {
+ format_to!(text, "{} ", where_clause.to_string());
+ }
+
+ format_to!(text, "{}", assoc_items.to_string());
+
+ ast_from_text(&text)
+}
+
pub fn type_bound(bound: &str) -> ast::TypeBound {
ast_from_text(&format!("fn f<T: {bound}>() {{ }}"))
}
@@ -985,6 +1032,41 @@ pub fn struct_(
ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",))
}
+pub fn attr_outer(meta: ast::Meta) -> ast::Attr {
+ ast_from_text(&format!("#[{meta}]"))
+}
+
+pub fn attr_inner(meta: ast::Meta) -> ast::Attr {
+ ast_from_text(&format!("#![{meta}]"))
+}
+
+pub fn meta_expr(path: ast::Path, expr: ast::Expr) -> ast::Meta {
+ ast_from_text(&format!("#[{path} = {expr}]"))
+}
+
+pub fn meta_token_tree(path: ast::Path, tt: ast::TokenTree) -> ast::Meta {
+ ast_from_text(&format!("#[{path}{tt}]"))
+}
+
+pub fn meta_path(path: ast::Path) -> ast::Meta {
+ ast_from_text(&format!("#[{path}]"))
+}
+
+pub fn token_tree(
+ delimiter: SyntaxKind,
+ tt: Vec<NodeOrToken<ast::TokenTree, SyntaxToken>>,
+) -> ast::TokenTree {
+ let (l_delimiter, r_delimiter) = match delimiter {
+ T!['('] => ('(', ')'),
+ T!['['] => ('[', ']'),
+ T!['{'] => ('{', '}'),
+ _ => panic!("invalid delimiter `{delimiter:?}`"),
+ };
+ let tt = tt.into_iter().join("");
+
+ ast_from_text(&format!("tt!{l_delimiter}{tt}{r_delimiter}"))
+}
+
#[track_caller]
fn ast_from_text<N: AstNode>(text: &str) -> N {
let parse = SourceFile::parse(text);
@@ -1022,6 +1104,17 @@ pub mod tokens {
)
});
+ pub fn semicolon() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == SEMICOLON)
+ .unwrap()
+ }
+
pub fn single_space() -> SyntaxToken {
SOURCE_FILE
.tree()
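
A rough, std-only sketch of the text-building half of the new `make::trait_` constructor above; the real helper formats source text like this and then re-parses it with `ast_from_text`. Parameter types are simplified to strings for illustration.

    fn trait_text(
        is_unsafe: bool,
        ident: &str,
        gen_params: Option<&str>,
        where_clause: Option<&str>,
        assoc_items: &str,
    ) -> String {
        let mut text = String::new();
        if is_unsafe {
            text.push_str("unsafe ");
        }
        text.push_str("trait ");
        text.push_str(ident);
        match gen_params {
            Some(params) => {
                text.push_str(params);
                text.push(' ');
            }
            None => text.push(' '),
        }
        if let Some(clause) = where_clause {
            text.push_str(clause);
            text.push(' ');
        }
        text.push_str(assoc_items);
        text
    }

    fn main() {
        assert_eq!(trait_text(true, "Foo", Some("<T>"), None, "{}"), "unsafe trait Foo<T> {}");
    }
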
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
index 090eb89f4..87fd51d70 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/token_ext.rs
@@ -2,7 +2,9 @@
use std::borrow::Cow;
-use rustc_lexer::unescape::{unescape_byte, unescape_char, unescape_literal, Mode};
+use rustc_lexer::unescape::{
+ unescape_byte, unescape_c_string, unescape_char, unescape_literal, CStrUnit, Mode,
+};
use crate::{
ast::{self, AstToken},
@@ -146,6 +148,7 @@ impl QuoteOffsets {
pub trait IsString: AstToken {
const RAW_PREFIX: &'static str;
+ const MODE: Mode;
fn is_raw(&self) -> bool {
self.text().starts_with(Self::RAW_PREFIX)
}
@@ -181,7 +184,7 @@ pub trait IsString: AstToken {
let text = &self.text()[text_range_no_quotes - start];
let offset = text_range_no_quotes.start() - start;
- unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
+ unescape_literal(text, Self::MODE, &mut |range, unescaped_char| {
let text_range =
TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
cb(text_range + offset, unescaped_char);
@@ -196,6 +199,7 @@ pub trait IsString: AstToken {
impl IsString for ast::String {
const RAW_PREFIX: &'static str = "r";
+ const MODE: Mode = Mode::Str;
}
impl ast::String {
@@ -213,7 +217,7 @@ impl ast::String {
let mut buf = String::new();
let mut prev_end = 0;
let mut has_error = false;
- unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
+ unescape_literal(text, Self::MODE, &mut |char_range, unescaped_char| match (
unescaped_char,
buf.capacity() == 0,
) {
@@ -239,6 +243,7 @@ impl ast::String {
impl IsString for ast::ByteString {
const RAW_PREFIX: &'static str = "br";
+ const MODE: Mode = Mode::ByteStr;
}
impl ast::ByteString {
@@ -256,7 +261,7 @@ impl ast::ByteString {
let mut buf: Vec<u8> = Vec::new();
let mut prev_end = 0;
let mut has_error = false;
- unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match (
+ unescape_literal(text, Self::MODE, &mut |char_range, unescaped_char| match (
unescaped_char,
buf.capacity() == 0,
) {
@@ -282,42 +287,70 @@ impl ast::ByteString {
impl IsString for ast::CString {
const RAW_PREFIX: &'static str = "cr";
+ const MODE: Mode = Mode::CStr;
+
+ fn escaped_char_ranges(
+ &self,
+ cb: &mut dyn FnMut(TextRange, Result<char, rustc_lexer::unescape::EscapeError>),
+ ) {
+ let text_range_no_quotes = match self.text_range_between_quotes() {
+ Some(it) => it,
+ None => return,
+ };
+
+ let start = self.syntax().text_range().start();
+ let text = &self.text()[text_range_no_quotes - start];
+ let offset = text_range_no_quotes.start() - start;
+
+ unescape_c_string(text, Self::MODE, &mut |range, unescaped_char| {
+ let text_range =
+ TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ // XXX: This method should only be used for highlighting ranges. The unescaped
+ // char/byte is not used. For simplicity, we return an arbitrary placeholder char.
+ cb(text_range + offset, unescaped_char.map(|_| ' '));
+ });
+ }
}
impl ast::CString {
- pub fn value(&self) -> Option<Cow<'_, str>> {
+ pub fn value(&self) -> Option<Cow<'_, [u8]>> {
if self.is_raw() {
let text = self.text();
let text =
&text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
- return Some(Cow::Borrowed(text));
+ return Some(Cow::Borrowed(text.as_bytes()));
}
let text = self.text();
let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
- let mut buf = String::new();
+ let mut buf = Vec::new();
let mut prev_end = 0;
let mut has_error = false;
- unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
- unescaped_char,
+ let mut char_buf = [0u8; 4];
+ let mut extend_unit = |buf: &mut Vec<u8>, unit: CStrUnit| match unit {
+ CStrUnit::Byte(b) => buf.push(b),
+ CStrUnit::Char(c) => buf.extend(c.encode_utf8(&mut char_buf).as_bytes()),
+ };
+ unescape_c_string(text, Self::MODE, &mut |char_range, unescaped| match (
+ unescaped,
buf.capacity() == 0,
) {
- (Ok(c), false) => buf.push(c),
+ (Ok(u), false) => extend_unit(&mut buf, u),
(Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
prev_end = char_range.end
}
- (Ok(c), true) => {
+ (Ok(u), true) => {
buf.reserve_exact(text.len());
- buf.push_str(&text[..prev_end]);
- buf.push(c);
+ buf.extend(text[..prev_end].as_bytes());
+ extend_unit(&mut buf, u);
}
(Err(_), _) => has_error = true,
});
match (has_error, buf.capacity() == 0) {
(true, _) => None,
- (false, true) => Some(Cow::Borrowed(text)),
+ (false, true) => Some(Cow::Borrowed(text.as_bytes())),
(false, false) => Some(Cow::Owned(buf)),
}
}
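
The hunk above switches `ast::CString::value` from building a `String` to building raw bytes, since C string literals can contain byte escapes that are not valid UTF-8. A minimal standalone sketch of the same accumulation logic, with a hypothetical `Unit` enum standing in for `rustc_lexer::unescape::CStrUnit`:

```rust
// Standalone sketch of the byte-accumulation logic used above.
// `Unit` is an illustrative stand-in for `rustc_lexer::unescape::CStrUnit`.
enum Unit {
    Byte(u8),
    Char(char),
}

fn extend_unit(buf: &mut Vec<u8>, unit: Unit) {
    let mut char_buf = [0u8; 4];
    match unit {
        // A byte escape such as `\x80` contributes a single raw byte.
        Unit::Byte(b) => buf.push(b),
        // A char (e.g. from `\u{…}`) is appended as its UTF-8 encoding.
        Unit::Char(c) => buf.extend(c.encode_utf8(&mut char_buf).as_bytes()),
    }
}

fn main() {
    let mut buf = Vec::new();
    extend_unit(&mut buf, Unit::Byte(0x80));
    extend_unit(&mut buf, Unit::Char('é'));
    assert_eq!(buf, vec![0x80, 0xC3, 0xA9]);
}
```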
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
index efbf87966..4cd668a0c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -171,6 +171,109 @@ impl SourceFile {
}
}
+impl ast::TokenTree {
+ pub fn reparse_as_comma_separated_expr(self) -> Parse<ast::MacroEagerInput> {
+ let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
+
+ let mut parser_input = parser::Input::default();
+ let mut was_joint = false;
+ for t in tokens {
+ let kind = t.kind();
+ if kind.is_trivia() {
+ was_joint = false
+ } else {
+ if kind == SyntaxKind::IDENT {
+ let token_text = t.text();
+ let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
+ .unwrap_or(SyntaxKind::IDENT);
+ parser_input.push_ident(contextual_kw);
+ } else {
+ if was_joint {
+ parser_input.was_joint();
+ }
+ parser_input.push(kind);
+ // Tag the token as joint if it is a float with a fractional part.
+ // We use this jointness to inform the parser about which token-split
+ // event to emit when we encounter a float literal in a field access.
+ if kind == SyntaxKind::FLOAT_NUMBER && !t.text().ends_with('.') {
+ parser_input.was_joint();
+ }
+ }
+ was_joint = true;
+ }
+ }
+
+ let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input);
+
+ let mut tokens =
+ self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
+ let mut text = String::new();
+ let mut pos = TextSize::from(0);
+ let mut builder = SyntaxTreeBuilder::default();
+ for event in parser_output.iter() {
+ match event {
+ parser::Step::Token { kind, n_input_tokens } => {
+ let mut token = tokens.next().unwrap();
+ while token.kind().is_trivia() {
+ let text = token.text();
+ pos += TextSize::from(text.len() as u32);
+ builder.token(token.kind(), text);
+
+ token = tokens.next().unwrap();
+ }
+ text.push_str(token.text());
+ for _ in 1..n_input_tokens {
+ let token = tokens.next().unwrap();
+ text.push_str(token.text());
+ }
+
+ pos += TextSize::from(text.len() as u32);
+ builder.token(kind, &text);
+ text.clear();
+ }
+ parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
+ let token = tokens.next().unwrap();
+ let text = token.text();
+
+ match text.split_once('.') {
+ Some((left, right)) => {
+ assert!(!left.is_empty());
+ builder.start_node(SyntaxKind::NAME_REF);
+ builder.token(SyntaxKind::INT_NUMBER, left);
+ builder.finish_node();
+
+ // here we move the exit up, the original exit has been deleted in process
+ builder.finish_node();
+
+ builder.token(SyntaxKind::DOT, ".");
+
+ if has_pseudo_dot {
+ assert!(right.is_empty(), "{left}.{right}");
+ } else {
+ builder.start_node(SyntaxKind::NAME_REF);
+ builder.token(SyntaxKind::INT_NUMBER, right);
+ builder.finish_node();
+
+ // the parser creates an unbalanced start node, we are required to close it here
+ builder.finish_node();
+ }
+ }
+ None => unreachable!(),
+ }
+ pos += TextSize::from(text.len() as u32);
+ }
+ parser::Step::Enter { kind } => builder.start_node(kind),
+ parser::Step::Exit => builder.finish_node(),
+ parser::Step::Error { msg } => builder.error(msg.to_owned(), pos),
+ }
+ }
+
+ let (green, errors) = builder.finish_raw();
+
+ Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ }
+}
+
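One subtle piece of the method above is the `FloatSplit` step: a float literal such as `1.5` appearing in a field-access position is split on its dot into two integer parts, each wrapped in its own `NAME_REF` node. A minimal sketch of just that split, independent of the parser machinery:

```rust
// Minimal sketch of the float-splitting rule handled in `FloatSplit` above.
// A trailing dot ("1.") keeps a pseudo dot and has no right-hand part.
fn split_float(text: &str) -> (&str, Option<&str>) {
    match text.split_once('.') {
        Some((left, right)) if right.is_empty() => (left, None),
        Some((left, right)) => (left, Some(right)),
        None => (text, None),
    }
}

fn main() {
    assert_eq!(split_float("1.5"), ("1", Some("5")));
    assert_eq!(split_float("1."), ("1", None));
}
```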
/// Matches a `SyntaxNode` against an `ast` type.
///
/// # Example:
diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
index c5783b91a..e4db33f1c 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/tests/ast_src.rs
@@ -216,6 +216,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
// macro related
"MACRO_ITEMS",
"MACRO_STMTS",
+ "MACRO_EAGER_INPUT",
],
};
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
index 602baed37..75e7a3fec 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs
@@ -47,13 +47,10 @@
//! ```
//!
//! Metadata allows specifying all settings and variables
-//! that are available in a real rust project:
-//! - crate names via `crate:cratename`
-//! - dependencies via `deps:dep1,dep2`
-//! - configuration settings via `cfg:dbg=false,opt_level=2`
-//! - environment variables via `env:PATH=/bin,RUST_LOG=debug`
+//! that are available in a real rust project. See [`Fixture`]
+//! for the syntax.
//!
-//! Example using all available metadata:
+//! Example using some available metadata:
//! ```
//! "
//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
@@ -68,17 +65,74 @@ use stdx::trim_indent;
#[derive(Debug, Eq, PartialEq)]
pub struct Fixture {
+ /// Specifies the path for this file. It must start with "/".
pub path: String,
- pub text: String,
+ /// Defines a new crate and makes this file its root module.
+ ///
+ /// Version and repository URL of the crate can optionally be specified; if
+ /// either one is specified, the other must also be specified.
+ ///
+ /// Syntax:
+ /// - `crate:my_awesome_lib`
+ /// - `crate:my_awesome_lib@0.0.1,https://example.com/repo.git`
pub krate: Option<String>,
+ /// Specifies dependencies of this crate. This must be used with `crate` meta.
+ ///
+ /// Syntax: `deps:hir-def,ide-assists`
pub deps: Vec<String>,
+ /// Limits crates in the extern prelude. The set of crate names must be a
+ /// subset of `deps`. This must be used with `crate` meta.
+ ///
+ /// If this is not specified, all the dependencies will be in the extern prelude.
+ ///
+ /// Syntax: `extern-prelude:hir-def,ide-assists`
pub extern_prelude: Option<Vec<String>>,
- pub cfg_atoms: Vec<String>,
- pub cfg_key_values: Vec<(String, String)>,
+ /// Specifies configuration options to be enabled. Options may have associated
+ /// values.
+ ///
+ /// Syntax: `cfg:test,dbg=false,opt_level=2`
+ pub cfgs: Vec<(String, Option<String>)>,
+ /// Specifies the edition of this crate. This must be used with `crate`
+ /// meta. If this is not specified, [`base_db::input::Edition::CURRENT`]
+ /// will be used.
+ ///
+ /// Syntax: `edition:2021`
pub edition: Option<String>,
+ /// Specifies environment variables.
+ ///
+ /// Syntax: `env:PATH=/bin,RUST_LOG=debug`
pub env: FxHashMap<String, String>,
+ /// Introduces a new [source root](base_db::input::SourceRoot). This file **and
+ /// the following files** will belong to the new source root. This must be used
+ /// with `crate` meta.
+ ///
+ /// Use this if you want to test something that uses `SourceRoot::is_library()`
+ /// to check editability.
+ ///
+ /// Note that files before the first fixture with `new_source_root` meta will
+ /// belong to an implicitly defined local source root.
+ ///
+ /// Syntax:
+ /// - `new_source_root:library`
+ /// - `new_source_root:local`
pub introduce_new_source_root: Option<String>,
+ /// Explicitly declares this crate as a library outside the current workspace. This
+ /// must be used with `crate` meta.
+ ///
+ /// This is implied if this file belongs to a library source root.
+ ///
+ /// Use this if you want to test something that checks if a crate is a workspace
+ /// member via [`CrateOrigin`](base_db::input::CrateOrigin).
+ ///
+ /// Syntax: `library`
+ pub library: bool,
+ /// Specifies LLVM data layout to be used.
+ ///
+ /// You probably don't want to manually specify this. If you must, see the LLVM
+ /// manual for the syntax: https://llvm.org/docs/LangRef.html#data-layout
pub target_data_layout: Option<String>,
+ /// Actual file contents. All meta comments are stripped.
+ pub text: String,
}
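
A hypothetical fixture string combining several of the metas documented above (crate, deps, cfg, env, edition, new_source_root, library); this is only an illustration of the documented syntax, not a fixture from the test suite:

```rust
// Illustrative only: a fixture using the documented meta syntax.
const FIXTURE: &str = r#"
//- /main.rs crate:app deps:util cfg:test,opt_level=2 env:RUST_LOG=debug
fn main() { util::helper(); }

//- /util.rs crate:util edition:2021 new_source_root:library library
pub fn helper() {}
"#;

fn main() {
    // Each `//-` line starts a new file; everything after it is that file's text.
    assert_eq!(FIXTURE.lines().filter(|l| l.starts_with("//-")).count(), 2);
}
```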
pub struct MiniCore {
@@ -178,23 +232,28 @@ impl FixtureWithProjectMeta {
fn parse_meta_line(meta: &str) -> Fixture {
assert!(meta.starts_with("//-"));
let meta = meta["//-".len()..].trim();
- let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
+ let mut components = meta.split_ascii_whitespace();
- let path = components[0].to_string();
+ let path = components.next().expect("fixture meta must start with a path").to_string();
assert!(path.starts_with('/'), "fixture path does not start with `/`: {path:?}");
let mut krate = None;
let mut deps = Vec::new();
let mut extern_prelude = None;
let mut edition = None;
- let mut cfg_atoms = Vec::new();
- let mut cfg_key_values = Vec::new();
+ let mut cfgs = Vec::new();
let mut env = FxHashMap::default();
let mut introduce_new_source_root = None;
+ let mut library = false;
let mut target_data_layout = Some(
"e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_string(),
);
- for component in components[1..].iter() {
+ for component in components {
+ if component == "library" {
+ library = true;
+ continue;
+ }
+
let (key, value) =
component.split_once(':').unwrap_or_else(|| panic!("invalid meta line: {meta:?}"));
match key {
@@ -212,8 +271,8 @@ impl FixtureWithProjectMeta {
"cfg" => {
for entry in value.split(',') {
match entry.split_once('=') {
- Some((k, v)) => cfg_key_values.push((k.to_string(), v.to_string())),
- None => cfg_atoms.push(entry.to_string()),
+ Some((k, v)) => cfgs.push((k.to_string(), Some(v.to_string()))),
+ None => cfgs.push((entry.to_string(), None)),
}
}
}
@@ -243,11 +302,11 @@ impl FixtureWithProjectMeta {
krate,
deps,
extern_prelude,
- cfg_atoms,
- cfg_key_values,
+ cfgs,
edition,
env,
introduce_new_source_root,
+ library,
target_data_layout,
}
}
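
The `cfg` handling above now folds atoms and key-value pairs into a single `Vec<(String, Option<String>)>`. A standalone sketch of that parsing step:

```rust
// Standalone sketch of the cfg parsing above: atoms become `(key, None)`,
// `key=value` entries become `(key, Some(value))`.
fn parse_cfgs(value: &str) -> Vec<(String, Option<String>)> {
    let mut cfgs = Vec::new();
    for entry in value.split(',') {
        match entry.split_once('=') {
            Some((k, v)) => cfgs.push((k.to_string(), Some(v.to_string()))),
            None => cfgs.push((entry.to_string(), None)),
        }
    }
    cfgs
}

fn main() {
    assert_eq!(
        parse_cfgs("test,opt_level=2"),
        vec![
            ("test".to_string(), None),
            ("opt_level".to_string(), Some("2".to_string())),
        ]
    );
}
```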
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index 266bc2391..c765f4244 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -20,6 +20,7 @@
//! deref_mut: deref
//! deref: sized
//! derive:
+//! discriminant:
//! drop:
//! eq: sized
//! error: fmt
@@ -36,6 +37,7 @@
//! iterator: option
//! iterators: iterator, fn
//! manually_drop: drop
+//! non_null:
//! non_zero:
//! option: panic
//! ord: eq, option
@@ -129,6 +131,14 @@ pub mod marker {
#[lang = "phantom_data"]
pub struct PhantomData<T: ?Sized>;
// endregion:phantom_data
+
+ // region:discriminant
+ #[lang = "discriminant_kind"]
+ pub trait DiscriminantKind {
+ #[lang = "discriminant_type"]
+ type Discriminant;
+ }
+ // endregion:discriminant
}
// region:default
@@ -354,6 +364,11 @@ pub mod mem {
pub fn size_of<T>() -> usize;
}
// endregion:size_of
+
+ // region:discriminant
+ use crate::marker::DiscriminantKind;
+ pub struct Discriminant<T>(<T as DiscriminantKind>::Discriminant);
+ // endregion:discriminant
}
pub mod ptr {
@@ -377,6 +392,19 @@ pub mod ptr {
type Metadata;
}
// endregion:pointee
+ // region:non_null
+ #[rustc_layout_scalar_valid_range_start(1)]
+ #[rustc_nonnull_optimization_guaranteed]
+ pub struct NonNull<T: ?Sized> {
+ pointer: *const T,
+ }
+ // region:coerce_unsized
+ impl<T: ?Sized, U: ?Sized> crate::ops::CoerceUnsized<NonNull<U>> for NonNull<T> where
+ T: crate::marker::Unsize<U>
+ {
+ }
+ // endregion:coerce_unsized
+ // endregion:non_null
}
pub mod ops {
@@ -1287,6 +1315,11 @@ mod macros {
pub macro derive($item:item) {
/* compiler built-in */
}
+
+ #[rustc_builtin_macro]
+ pub macro derive_const($item:item) {
+ /* compiler built-in */
+ }
}
// endregion:derive
@@ -1354,24 +1387,24 @@ pub mod error {
pub mod prelude {
pub mod v1 {
pub use crate::{
- clone::Clone, // :clone
- cmp::{Eq, PartialEq}, // :eq
- cmp::{Ord, PartialOrd}, // :ord
- convert::AsRef, // :as_ref
- convert::{From, Into}, // :from
- default::Default, // :default
- iter::{IntoIterator, Iterator}, // :iterator
- macros::builtin::derive, // :derive
- marker::Copy, // :copy
- marker::Send, // :send
- marker::Sized, // :sized
- marker::Sync, // :sync
- mem::drop, // :drop
- ops::Drop, // :drop
- ops::{Fn, FnMut, FnOnce}, // :fn
- option::Option::{self, None, Some}, // :option
- panic, // :panic
- result::Result::{self, Err, Ok}, // :result
+ clone::Clone, // :clone
+ cmp::{Eq, PartialEq}, // :eq
+ cmp::{Ord, PartialOrd}, // :ord
+ convert::AsRef, // :as_ref
+ convert::{From, Into}, // :from
+ default::Default, // :default
+ iter::{IntoIterator, Iterator}, // :iterator
+ macros::builtin::{derive, derive_const}, // :derive
+ marker::Copy, // :copy
+ marker::Send, // :send
+ marker::Sized, // :sized
+ marker::Sync, // :sync
+ mem::drop, // :drop
+ ops::Drop, // :drop
+ ops::{Fn, FnMut, FnOnce}, // :fn
+ option::Option::{self, None, Some}, // :option
+ panic, // :panic
+ result::Result::{self, Err, Ok}, // :result
};
}
diff --git a/src/tools/rust-analyzer/crates/tt/src/buffer.rs b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
index 0615a3763..cade2e9f6 100644
--- a/src/tools/rust-analyzer/crates/tt/src/buffer.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
@@ -134,7 +134,7 @@ pub enum TokenTreeRef<'a, Span> {
Leaf(&'a Leaf<Span>, &'a TokenTree<Span>),
}
-impl<'a, Span: Clone> TokenTreeRef<'a, Span> {
+impl<Span: Clone> TokenTreeRef<'_, Span> {
pub fn cloned(&self) -> TokenTree<Span> {
match self {
TokenTreeRef::Subtree(subtree, tt) => match tt {
@@ -153,13 +153,13 @@ pub struct Cursor<'a, Span> {
ptr: EntryPtr,
}
-impl<'a, Span> PartialEq for Cursor<'a, Span> {
+impl<Span> PartialEq for Cursor<'_, Span> {
fn eq(&self, other: &Cursor<'_, Span>) -> bool {
self.ptr == other.ptr && std::ptr::eq(self.buffer, other.buffer)
}
}
-impl<'a, Span> Eq for Cursor<'a, Span> {}
+impl<Span> Eq for Cursor<'_, Span> {}
impl<'a, Span> Cursor<'a, Span> {
/// Check whether it is eof
diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs
index c2ebf0374..b5a72bec0 100644
--- a/src/tools/rust-analyzer/crates/tt/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs
@@ -65,7 +65,22 @@ pub mod token_id {
}
impl TokenTree {
pub const fn empty() -> Self {
- Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] })
+ Self::Subtree(Subtree::empty())
+ }
+ }
+
+ impl Subtree {
+ pub fn visit_ids(&mut self, f: &mut impl FnMut(TokenId) -> TokenId) {
+ self.delimiter.open = f(self.delimiter.open);
+ self.delimiter.close = f(self.delimiter.close);
+ self.token_trees.iter_mut().for_each(|tt| match tt {
+ crate::TokenTree::Leaf(leaf) => match leaf {
+ crate::Leaf::Literal(it) => it.span = f(it.span),
+ crate::Leaf::Punct(it) => it.span = f(it.span),
+ crate::Leaf::Ident(it) => it.span = f(it.span),
+ },
+ crate::TokenTree::Subtree(s) => s.visit_ids(f),
+ })
}
}
}
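
The `visit_ids` helper added above rewrites every span in a subtree: both delimiter spans and every leaf span, recursing into nested subtrees. A standalone sketch of the same walk with simplified stand-in types (the real code uses `tt::Subtree` with `TokenId` spans):

```rust
// Simplified stand-ins for tt::Subtree / TokenId, for illustration only.
type TokenId = u32;

enum TokenTree {
    Leaf(TokenId),
    Subtree(Subtree),
}

struct Subtree {
    open: TokenId,
    close: TokenId,
    token_trees: Vec<TokenTree>,
}

impl Subtree {
    // Apply `f` to every span in the tree, delimiters included.
    fn visit_ids(&mut self, f: &mut impl FnMut(TokenId) -> TokenId) {
        self.open = f(self.open);
        self.close = f(self.close);
        for tt in &mut self.token_trees {
            match tt {
                TokenTree::Leaf(id) => *id = f(*id),
                TokenTree::Subtree(s) => s.visit_ids(f),
            }
        }
    }
}

fn main() {
    let mut tree = Subtree {
        open: 0,
        close: 1,
        token_trees: vec![
            TokenTree::Leaf(2),
            TokenTree::Subtree(Subtree { open: 3, close: 4, token_trees: vec![] }),
        ],
    };
    // Shift every id by 10, as a remapping pass might do.
    tree.visit_ids(&mut |id| id + 10);
    assert_eq!(tree.open, 10);
}
```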
@@ -107,7 +122,6 @@ impl_from!(Literal<Span>, Punct<Span>, Ident<Span> for Leaf);
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Subtree<Span> {
- // FIXME, this should not be Option
pub delimiter: Delimiter<Span>,
pub token_trees: Vec<TokenTree<Span>>,
}
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
index 5d61a2272..95c514251 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -15,7 +15,8 @@ doctest = false
tracing = "0.1.35"
walkdir = "2.3.2"
crossbeam-channel = "0.5.5"
-notify = "5.0"
+# We pin to exactly 5.1.0 because any higher version pulls in a new windows-sys dupe
+notify = "=5.1.0"
stdx.workspace = true
vfs.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
index 3ae3dc83c..c35785cf9 100644
--- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
@@ -14,7 +14,7 @@ doctest = false
[dependencies]
rustc-hash = "1.1.0"
fst = "0.4.7"
-indexmap = "1.9.1"
+indexmap = "2.0.0"
nohash-hasher.workspace = true
paths.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
index d327f2edf..52ada32bd 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
@@ -292,8 +292,8 @@ impl From<AbsPathBuf> for VfsPath {
impl fmt::Display for VfsPath {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.0 {
- VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f),
- VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Display::fmt(it, f),
+ VfsPathRepr::PathBuf(it) => it.fmt(f),
+ VfsPathRepr::VirtualPath(VirtualPath(it)) => it.fmt(f),
}
}
}
@@ -307,8 +307,8 @@ impl fmt::Debug for VfsPath {
impl fmt::Debug for VfsPathRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &self {
- VfsPathRepr::PathBuf(it) => fmt::Debug::fmt(&it.display(), f),
- VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Debug::fmt(&it, f),
+ VfsPathRepr::PathBuf(it) => it.fmt(f),
+ VfsPathRepr::VirtualPath(VirtualPath(it)) => it.fmt(f),
}
}
}
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
index bc58aa722..024acb877 100644
--- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
+++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
<!---
-lsp_ext.rs hash: 2d60bbffe70ae198
+lsp_ext.rs hash: 149a5be3c5e469d1
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@@ -886,3 +886,48 @@ export interface FetchDependencyListResult {
}
```
Returns all crates from this workspace, so it can be used to create a viewTree to help navigate the dependency tree.
+
+## View Recursive Memory Layout
+
+**Method:** `rust-analyzer/viewRecursiveMemoryLayout`
+
+**Request:** `TextDocumentPositionParams`
+
+**Response:**
+
+```typescript
+export interface RecursiveMemoryLayoutNode {
+ /// Name of the item, or [ROOT], `.n` for tuples
+ item_name: string;
+ /// Full name of the type (type aliases are ignored)
+ typename: string;
+ /// Size of the type in bytes
+ size: number;
+ /// Alignment of the type in bytes
+ alignment: number;
+ /// Offset of the type relative to its parent (or 0 if it is the root)
+ offset: number;
+ /// Index of the node's parent (or -1 if it is the root)
+ parent_idx: number;
+ /// Index of the node's first child (or -1 if it does not have children)
+ children_start: number;
+ /// Number of child nodes (unspecified if it does not have children)
+ children_len: number;
+};
+
+export interface RecursiveMemoryLayout {
+ nodes: RecursiveMemoryLayoutNode[];
+};
+```
+
+Returns a vector of nodes representing items in the datatype as a tree; `RecursiveMemoryLayout::nodes[0]` is the root node.
+
+If `RecursiveMemoryLayout::nodes::length == 0`, we could not find a suitable type.
+
+Generic types do not give anything because they are incomplete. Fully specified generic types do not give anything if they are selected directly, but they do work when they are a child of other types ([this is consistent with other behavior](https://github.com/rust-lang/rust-analyzer/issues/15010)).
+
+### Unresolved questions:
+
+- How should enums/unions be represented? Currently they do not produce any children because they have multiple distinct sets of children.
+- Should niches be represented? Currently they are not reported.
+- A visual representation of the memory layout is not specified; see the provided implementation for an example. However, it may not translate well to terminal-based editors or other such environments.
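
The response is a flat array encoding a tree: children of a node occupy the index range `children_start .. children_start + children_len`, and `parent_idx` points back up. A hedged Rust sketch of how a client might print the tree (field names mirror the response above; this is not an official client API):

```rust
// Illustrative client-side walk over the flat node array described above.
struct Node {
    item_name: String,
    typename: String,
    size: u64,
    children_start: i64,
    children_len: u64,
}

fn print_tree(nodes: &[Node], idx: usize, depth: usize) {
    let node = &nodes[idx];
    println!(
        "{:indent$}{}: {} ({} bytes)",
        "",
        node.item_name,
        node.typename,
        node.size,
        indent = depth * 2
    );
    // children_start == -1 means the node has no children.
    if node.children_start >= 0 {
        for i in 0..node.children_len as usize {
            print_tree(nodes, node.children_start as usize + i, depth + 1);
        }
    }
}

fn main() {
    let nodes = vec![Node {
        item_name: "[ROOT]".into(),
        typename: "MyStruct".into(),
        size: 8,
        children_start: -1,
        children_len: 0,
    }];
    print_tree(&nodes, 0, 0); // nodes[0] is always the root
}
```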
diff --git a/src/tools/rust-analyzer/docs/dev/style.md b/src/tools/rust-analyzer/docs/dev/style.md
index d2a03fba4..786127639 100644
--- a/src/tools/rust-analyzer/docs/dev/style.md
+++ b/src/tools/rust-analyzer/docs/dev/style.md
@@ -869,6 +869,19 @@ type -> ty
**Rationale:** consistency.
+## Error Handling Trivia
+
+Use `anyhow::Result` rather than just `Result`.
+
+**Rationale:** makes it immediately clear which `Result` type is meant.
+
+Use `anyhow::format_err!` rather than `anyhow::anyhow!`.
+
+**Rationale:** consistent, boring, avoids stuttering.
+
+There's no specific guidance on the formatting of error messages; see [anyhow/#209](https://github.com/dtolnay/anyhow/issues/209).
+Do not end error and context messages with `.`, though.
+
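A short sketch of the convention above, assuming nothing beyond the `anyhow` crate itself: spell out `anyhow::Result`, build ad-hoc errors with `anyhow::format_err!`, and leave the trailing period off messages:

```rust
use anyhow::Context as _;

// Sketch only: demonstrates `anyhow::Result`, `anyhow::format_err!`,
// and error/context messages that do not end with a period.
fn read_config(path: &std::path::Path) -> anyhow::Result<String> {
    if !path.exists() {
        return Err(anyhow::format_err!("config file {} does not exist", path.display()));
    }
    std::fs::read_to_string(path).with_context(|| format!("failed to read {}", path.display()))
}
```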
## Early Returns
Do use early returns
diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc
index b5c095fd9..5dafd1a4c 100644
--- a/src/tools/rust-analyzer/docs/user/manual.adoc
+++ b/src/tools/rust-analyzer/docs/user/manual.adoc
@@ -64,22 +64,8 @@ You can install the latest release of the plugin from
https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace].
Note that the plugin may cause conflicts with the
-https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[official Rust plugin].
-It is recommended to disable the Rust plugin when using the rust-analyzer extension.
-
-By default, the plugin will prompt you to download the matching version of the server as well:
-
-image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[]
-
-[NOTE]
-====
-To disable this notification put the following to `settings.json`
-
-[source,json]
-----
-{ "rust-analyzer.updates.askBeforeDownload": false }
-----
-====
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[previous official Rust plugin].
+The latter is no longer maintained and should be uninstalled.
The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under:
@@ -141,6 +127,9 @@ If you're not using Code, you can compile and install only the LSP server:
$ cargo xtask install --server
----
+Make sure that `.cargo/bin` is in `$PATH` and precedes paths where `rust-analyzer` may also be installed.
+Specifically, `rustup` includes a proxy called `rust-analyzer`, which can cause problems if you're planning to use a source build or even a downloaded binary.
+
=== rust-analyzer Language Server Binary
Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
@@ -280,12 +269,12 @@ Also see the https://emacs-lsp.github.io/lsp-mode/page/lsp-rust-analyzer/[rust-a
Note the excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm] on how to set-up Emacs for Rust development with LSP mode and several other packages.
-=== Vim/NeoVim
+=== Vim/Neovim
Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
Not needed if the extension can install/update it on its own, coc-rust-analyzer is one example.
-There are several LSP client implementations for vim or neovim:
+There are several LSP client implementations for Vim or Neovim:
==== coc-rust-analyzer
@@ -308,7 +297,7 @@ Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected
https://github.com/autozimu/LanguageClient-neovim[here]
* The GitHub project wiki has extra tips on configuration
-2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists):
+2. Configure by adding this to your Vim/Neovim config file (replacing the existing Rust-specific line if it exists):
+
[source,vim]
----
@@ -335,7 +324,7 @@ let g:ale_linters = {'rust': ['analyzer']}
==== nvim-lsp
-NeoVim 0.5 has built-in language server support.
+Neovim 0.5 has built-in language server support.
For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig].
Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`.
@@ -376,7 +365,7 @@ EOF
See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
-Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for neovim.
+Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for Neovim.
==== vim-lsp
@@ -460,27 +449,24 @@ You'll need to close and reopen all .rs and Cargo files, or to restart the IDE,
Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
-Earlier versions allow you to use rust-analyzer through a simple settings change.
-In the LSP Client settings of Kate, copy the content of the third tab "default parameters" to the second tab "server configuration".
-Then in the configuration replace:
-[source,json]
-----
- "rust": {
- "command": ["rls"],
- "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
- "url": "https://github.com/rust-lang/rls",
- "highlightingModeRegex": "^Rust$"
- },
-----
-With
+To change rust-analyzer config options, start from the following example and put it into Kate's "User Server Settings" tab (located under the LSP Client settings):
[source,json]
----
+{
+ "servers": {
"rust": {
- "command": ["rust-analyzer"],
- "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
- "url": "https://github.com/rust-lang/rust-analyzer",
- "highlightingModeRegex": "^Rust$"
- },
+ "initializationOptions": {
+ "cachePriming": {
+ "enable": false
+ },
+ "check": {
+ "allTargets": false
+ },
+ "checkOnSave": false
+ }
+ }
+ }
+}
----
Then click on apply, and restart the LSP server for your rust project.
@@ -933,17 +919,17 @@ For example:
More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here].
==== Setting runnable environment variables
-You can use "rust-analyzer.runnableEnv" setting to define runnable environment-specific substitution variables.
+You can use "rust-analyzer.runnables.extraEnv" setting to define runnable environment-specific substitution variables.
The simplest way for all runnables in a bunch:
```jsonc
-"rust-analyzer.runnableEnv": {
+"rust-analyzer.runnables.extraEnv": {
"RUN_SLOW_TESTS": "1"
}
```
Or it is possible to specify vars more granularly:
```jsonc
-"rust-analyzer.runnableEnv": [
+"rust-analyzer.runnables.extraEnv": [
{
// "mask": null, // null mask means that this rule will be applied for all runnables
env: {
@@ -963,6 +949,29 @@ Or it is possible to specify vars more granularly:
You can use any valid regular expression as a mask.
Also note that a full runnable name is something like *run bin_or_example_name*, *test some::mod::test_name* or *test-mod some::mod*, so it is possible to distinguish binaries, single tests, and test modules with these masks: `"^run"`, `"^test "` (the trailing space matters!), and `"^test-mod"` respectively.
+If needed, you can set different values for different platforms:
+```jsonc
+"rust-analyzer.runnables.extraEnv": [
+ {
+ "platform": "win32", // windows only
+ "env": {
+ "APP_DATA": "windows specific data"
+ }
+ },
+ {
+ "platform": ["linux"],
+ "env": {
+ "APP_DATA": "linux data",
+ }
+ },
+ { // for all platforms
+ "env": {
+ "APP_COMMON_DATA": "xxx",
+ }
+ }
+]
+```
+
==== Compiler feedback from external commands
Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
diff --git a/src/tools/rust-analyzer/lib/README.md b/src/tools/rust-analyzer/lib/README.md
index ed55e31d6..d420eeb96 100644
--- a/src/tools/rust-analyzer/lib/README.md
+++ b/src/tools/rust-analyzer/lib/README.md
@@ -3,3 +3,12 @@
Crates in this directory are published to [crates.io](https://crates.io) and obey semver.
They _could_ live in a separate repo, but we want to experiment with a monorepo setup.
+
+We use these crates from crates.io, not the local copies, because we want to ensure that
+rust-analyzer works with the versions that are published. This means that if you add a new API to these
+crates, you need to release a new version to crates.io before you can use that API in rust-analyzer.
+
+To release new versions of these packages, change their version in Cargo.toml. Once your PR is merged into master, a workflow will automatically publish the new version to crates.io.
+
+While prototyping, the local versions can be used by uncommenting the relevant lines in the
+`[patch.'crates-io']` section in Cargo.toml.
diff --git a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
index ec5ba8ba0..01f2b87b3 100644
--- a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "la-arena"
-version = "0.3.0"
+version = "0.3.1"
description = "Simple index-based arena without deletion."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/la-arena"
diff --git a/src/tools/rust-analyzer/lib/line-index/Cargo.toml b/src/tools/rust-analyzer/lib/line-index/Cargo.toml
index 019ad3a53..6c0d06f47 100644
--- a/src/tools/rust-analyzer/lib/line-index/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/line-index/Cargo.toml
@@ -1,11 +1,11 @@
[package]
name = "line-index"
-version = "0.1.0-pre.1"
+version = "0.1.0"
description = "Maps flat `TextSize` offsets to/from `(line, column)` representation."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index"
edition = "2021"
[dependencies]
-text-size.workspace = true
-nohash-hasher.workspace = true
+text-size = "1.1.0"
+nohash-hasher = "0.2.0"
diff --git a/src/tools/rust-analyzer/lib/line-index/src/lib.rs b/src/tools/rust-analyzer/lib/line-index/src/lib.rs
index ad67d3f24..03371c9c8 100644
--- a/src/tools/rust-analyzer/lib/line-index/src/lib.rs
+++ b/src/tools/rust-analyzer/lib/line-index/src/lib.rs
@@ -94,44 +94,7 @@ pub struct LineIndex {
impl LineIndex {
/// Returns a `LineIndex` for the `text`.
pub fn new(text: &str) -> LineIndex {
- let mut newlines = Vec::<TextSize>::with_capacity(16);
- let mut line_wide_chars = IntMap::<u32, Box<[WideChar]>>::default();
-
- let mut wide_chars = Vec::<WideChar>::new();
- let mut cur_row = TextSize::from(0);
- let mut cur_col = TextSize::from(0);
- let mut line = 0u32;
-
- for c in text.chars() {
- let c_len = TextSize::of(c);
- cur_row += c_len;
- if c == '\n' {
- newlines.push(cur_row);
-
- // Save any wide characters seen in the previous line
- if !wide_chars.is_empty() {
- let cs = std::mem::take(&mut wide_chars).into_boxed_slice();
- line_wide_chars.insert(line, cs);
- }
-
- // Prepare for processing the next line
- cur_col = TextSize::from(0);
- line += 1;
- continue;
- }
-
- if !c.is_ascii() {
- wide_chars.push(WideChar { start: cur_col, end: cur_col + c_len });
- }
-
- cur_col += c_len;
- }
-
- // Save any wide characters seen in the last line
- if !wide_chars.is_empty() {
- line_wide_chars.insert(line, wide_chars.into_boxed_slice());
- }
-
+ let (newlines, line_wide_chars) = analyze_source_file(text);
LineIndex {
newlines: newlines.into_boxed_slice(),
line_wide_chars,
@@ -235,3 +198,182 @@ impl LineIndex {
self.len
}
}
+
+/// This is adapted from the rustc_span crate, https://github.com/rust-lang/rust/blob/de59844c98f7925242a798a72c59dc3610dd0e2c/compiler/rustc_span/src/analyze_source_file.rs
+fn analyze_source_file(src: &str) -> (Vec<TextSize>, IntMap<u32, Box<[WideChar]>>) {
+ assert!(src.len() < !0u32 as usize);
+ let mut lines = vec![];
+ let mut line_wide_chars = IntMap::<u32, Vec<WideChar>>::default();
+
+ // Calls the right implementation, depending on hardware support available.
+ analyze_source_file_dispatch(src, &mut lines, &mut line_wide_chars);
+
+ (lines, line_wide_chars.into_iter().map(|(k, v)| (k, v.into_boxed_slice())).collect())
+}
+
+#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+fn analyze_source_file_dispatch(
+ src: &str,
+ lines: &mut Vec<TextSize>,
+ multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>,
+) {
+ if is_x86_feature_detected!("sse2") {
+ // SAFETY: SSE2 support was checked
+ unsafe {
+ analyze_source_file_sse2(src, lines, multi_byte_chars);
+ }
+ } else {
+ analyze_source_file_generic(src, src.len(), TextSize::from(0), lines, multi_byte_chars);
+ }
+}
+
+/// Checks 16 byte chunks of text at a time. If the chunk contains
+/// something other than printable ASCII characters and newlines, the
+/// function falls back to the generic implementation. Otherwise it uses
+/// SSE2 intrinsics to quickly find all newlines.
+#[target_feature(enable = "sse2")]
+#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
+unsafe fn analyze_source_file_sse2(
+ src: &str,
+ lines: &mut Vec<TextSize>,
+ multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>,
+) {
+ #[cfg(target_arch = "x86")]
+ use std::arch::x86::*;
+ #[cfg(target_arch = "x86_64")]
+ use std::arch::x86_64::*;
+
+ const CHUNK_SIZE: usize = 16;
+
+ let src_bytes = src.as_bytes();
+
+ let chunk_count = src.len() / CHUNK_SIZE;
+
+ // This variable keeps track of where we should start decoding a
+ // chunk. If a multi-byte character spans across chunk boundaries,
+ // we need to skip that part in the next chunk because we already
+ // handled it.
+ let mut intra_chunk_offset = 0;
+
+ for chunk_index in 0..chunk_count {
+ let ptr = src_bytes.as_ptr() as *const __m128i;
+ // We don't know if the pointer is aligned to 16 bytes, so we
+ // use `loadu`, which supports unaligned loading.
+ let chunk = _mm_loadu_si128(ptr.add(chunk_index));
+
+ // For each character in the chunk, see if its byte value is < 0, which
+ // indicates that it's part of a UTF-8 char.
+ let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0));
+ // Create a bit mask from the comparison results.
+ let multibyte_mask = _mm_movemask_epi8(multibyte_test);
+
+ // If the bit mask is all zero, we only have ASCII chars here:
+ if multibyte_mask == 0 {
+ assert!(intra_chunk_offset == 0);
+
+ // Check for newlines in the chunk
+ let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8));
+ let newlines_mask = _mm_movemask_epi8(newlines_test);
+
+ if newlines_mask != 0 {
+ // All control characters are newlines, record them
+ let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32;
+ let output_offset = TextSize::from((chunk_index * CHUNK_SIZE + 1) as u32);
+
+ loop {
+ let index = newlines_mask.trailing_zeros();
+
+ if index >= CHUNK_SIZE as u32 {
+ // We have arrived at the end of the chunk.
+ break;
+ }
+
+ lines.push(TextSize::from(index) + output_offset);
+
+ // Clear the bit, so we can find the next one.
+ newlines_mask &= (!1) << index;
+ }
+ }
+ continue;
+ }
+
+ // The slow path.
+ // There are control chars in here, fallback to generic decoding.
+ let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset;
+ intra_chunk_offset = analyze_source_file_generic(
+ &src[scan_start..],
+ CHUNK_SIZE - intra_chunk_offset,
+ TextSize::from(scan_start as u32),
+ lines,
+ multi_byte_chars,
+ );
+ }
+
+ // There might still be a tail left to analyze
+ let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset;
+ if tail_start < src.len() {
+ analyze_source_file_generic(
+ &src[tail_start..],
+ src.len() - tail_start,
+ TextSize::from(tail_start as u32),
+ lines,
+ multi_byte_chars,
+ );
+ }
+}
+
+#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
+// The target (or compiler version) does not support SSE2 ...
+fn analyze_source_file_dispatch(
+ src: &str,
+ lines: &mut Vec<TextSize>,
+ multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>,
+) {
+ analyze_source_file_generic(src, src.len(), TextSize::from(0), lines, multi_byte_chars);
+}
+
+// `scan_len` determines the number of bytes in `src` to scan. Note that the
+// function can read past `scan_len` if a multi-byte character starts within the
+// range but extends past it. The overflow is returned by the function.
+fn analyze_source_file_generic(
+ src: &str,
+ scan_len: usize,
+ output_offset: TextSize,
+ lines: &mut Vec<TextSize>,
+ multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>,
+) -> usize {
+ assert!(src.len() >= scan_len);
+ let mut i = 0;
+ let src_bytes = src.as_bytes();
+
+ while i < scan_len {
+ let byte = unsafe {
+ // We verified that i < scan_len <= src.len()
+ *src_bytes.get_unchecked(i)
+ };
+
+ // How much to advance in order to get to the next UTF-8 char in the
+ // string.
+ let mut char_len = 1;
+
+ if byte == b'\n' {
+ lines.push(TextSize::from(i as u32 + 1) + output_offset);
+ } else if byte >= 127 {
+ // The slow path: Just decode to `char`.
+ let c = src[i..].chars().next().unwrap();
+ char_len = c.len_utf8();
+
+ let pos = TextSize::from(i as u32) + output_offset;
+
+ if char_len > 1 {
+ assert!((2..=4).contains(&char_len));
+ let mbc = WideChar { start: pos, end: pos + TextSize::from(char_len as u32) };
+ multi_byte_chars.entry(lines.len() as u32).or_default().push(mbc);
+ }
+ }
+
+ i += char_len;
+ }
+
+ i - scan_len
+}
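
A minimal usage sketch of the public API this feeds, assuming the crate's existing `LineIndex::new` / `line_col` interface: the newline and wide-char tables built above drive byte-offset to line/column conversion.

```rust
// Usage sketch; assumes the published `line-index` crate API.
use line_index::{LineCol, LineIndex};

fn main() {
    let index = LineIndex::new("héllo\nworld");
    // 'é' is two bytes in UTF-8, so the newline sits at byte offset 6 and
    // "world" starts at byte offset 7, i.e. line 1, column 0.
    assert_eq!(index.line_col(7.into()), LineCol { line: 1, col: 0 });
}
```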
diff --git a/src/tools/rust-analyzer/lib/line-index/src/tests.rs b/src/tools/rust-analyzer/lib/line-index/src/tests.rs
index 31c01c20e..8f3762d19 100644
--- a/src/tools/rust-analyzer/lib/line-index/src/tests.rs
+++ b/src/tools/rust-analyzer/lib/line-index/src/tests.rs
@@ -1,11 +1,120 @@
-use super::LineIndex;
-
-#[test]
-fn test_empty_index() {
- let col_index = LineIndex::new(
- "
-const C: char = 'x';
-",
- );
- assert_eq!(col_index.line_wide_chars.len(), 0);
+use crate::{LineIndex, TextSize, WideChar};
+
+macro_rules! test {
+ (
+ case: $test_name:ident,
+ text: $text:expr,
+ lines: $lines:expr,
+ multi_byte_chars: $multi_byte_chars:expr,
+ ) => {
+ #[test]
+ fn $test_name() {
+ let line_index = LineIndex::new($text);
+
+ let expected_lines: Vec<TextSize> =
+ $lines.into_iter().map(<TextSize as From<u32>>::from).collect();
+
+ assert_eq!(&*line_index.newlines, &*expected_lines);
+
+ let expected_mbcs: Vec<_> = $multi_byte_chars
+ .into_iter()
+ .map(|(line, (pos, end)): (u32, (u32, u32))| {
+ (line, WideChar { start: TextSize::from(pos), end: TextSize::from(end) })
+ })
+ .collect();
+
+ assert_eq!(
+ line_index
+ .line_wide_chars
+ .iter()
+ .flat_map(|(line, val)| std::iter::repeat(*line).zip(val.iter().copied()))
+ .collect::<Vec<_>>(),
+ expected_mbcs
+ );
+ }
+ };
}
+
+test!(
+ case: empty_text,
+ text: "",
+ lines: vec![],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: newlines_short,
+ text: "a\nc",
+ lines: vec![2],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: newlines_long,
+ text: "012345678\nabcdef012345678\na",
+ lines: vec![10, 26],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: newline_and_multi_byte_char_in_same_chunk,
+ text: "01234β789\nbcdef0123456789abcdef",
+ lines: vec![11],
+ multi_byte_chars: vec![(0, (5, 7))],
+);
+
+test!(
+ case: newline_and_control_char_in_same_chunk,
+ text: "01234\u{07}6789\nbcdef0123456789abcdef",
+ lines: vec![11],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: multi_byte_char_short,
+ text: "aβc",
+ lines: vec![],
+ multi_byte_chars: vec![(0, (1, 3))],
+);
+
+test!(
+ case: multi_byte_char_long,
+ text: "0123456789abcΔf012345β",
+ lines: vec![],
+ multi_byte_chars: vec![(0, (13, 15)), (0, (22, 24))],
+);
+
+test!(
+ case: multi_byte_char_across_chunk_boundary,
+ text: "0123456789abcdeΔ123456789abcdef01234",
+ lines: vec![],
+ multi_byte_chars: vec![(0, (15, 17))],
+);
+
+test!(
+ case: multi_byte_char_across_chunk_boundary_tail,
+ text: "0123456789abcdeΔ....",
+ lines: vec![],
+ multi_byte_chars: vec![(0, (15, 17))],
+);
+
+test!(
+ case: multi_byte_with_new_lines,
+ text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
+ lines: vec![7, 27],
+ multi_byte_chars: vec![(1, (13, 15)), (2, (29, 31))],
+);
+
+test!(
+ case: trailing_newline,
+ text: "0123456789\n",
+ lines: vec![11],
+ multi_byte_chars: vec![],
+);
+
+test!(
+ case: trailing_newline_chunk_boundary,
+ text: "0123456789abcde\n",
+ lines: vec![16],
+ multi_byte_chars: vec![],
+);
diff --git a/src/tools/rust-analyzer/lib/line-index/tests/it.rs b/src/tools/rust-analyzer/lib/line-index/tests/it.rs
deleted file mode 100644
index ce1c0bc6f..000000000
--- a/src/tools/rust-analyzer/lib/line-index/tests/it.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-use line_index::{LineCol, LineIndex, TextRange};
-
-#[test]
-fn test_line_index() {
- let text = "hello\nworld";
- let table = [
- (00, 0, 0),
- (01, 0, 1),
- (05, 0, 5),
- (06, 1, 0),
- (07, 1, 1),
- (08, 1, 2),
- (10, 1, 4),
- (11, 1, 5),
- ];
-
- let index = LineIndex::new(text);
- for (offset, line, col) in table {
- assert_eq!(index.line_col(offset.into()), LineCol { line, col });
- }
-
- let text = "\nhello\nworld";
- let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
- let index = LineIndex::new(text);
- for (offset, line, col) in table {
- assert_eq!(index.line_col(offset.into()), LineCol { line, col });
- }
-}
-
-#[test]
-fn test_char_len() {
- assert_eq!('メ'.len_utf8(), 3);
- assert_eq!('メ'.len_utf16(), 1);
-}
-
-#[test]
-fn test_splitlines() {
- fn r(lo: u32, hi: u32) -> TextRange {
- TextRange::new(lo.into(), hi.into())
- }
-
- let text = "a\nbb\nccc\n";
- let line_index = LineIndex::new(text);
-
- let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
- let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
- assert_eq!(actual, expected);
-
- let text = "";
- let line_index = LineIndex::new(text);
-
- let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
- let expected = vec![];
- assert_eq!(actual, expected);
-
- let text = "\n";
- let line_index = LineIndex::new(text);
-
- let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
- let expected = vec![r(0, 1)];
- assert_eq!(actual, expected)
-}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
index e78a9d2eb..01707d301 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
+++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "lsp-server"
-version = "0.7.0"
+version = "0.7.2"
description = "Generic LSP server scaffold."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
@@ -8,8 +8,8 @@ edition = "2021"
[dependencies]
log = "0.4.17"
-serde_json.workspace = true
-serde.workspace = true
+serde_json = "1.0.96"
+serde = { version = "1.0.156", features = ["derive"] }
crossbeam-channel = "0.5.6"
[dev-dependencies]
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
index b241561f9..730ad51f4 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs
@@ -265,7 +265,7 @@ fn read_msg_text(inp: &mut dyn BufRead) -> io::Result<Option<String>> {
let header_name = parts.next().unwrap();
let header_value =
parts.next().ok_or_else(|| invalid_data!("malformed header: {:?}", buf))?;
- if header_name == "Content-Length" {
+ if header_name.eq_ignore_ascii_case("Content-Length") {
size = Some(header_value.parse::<usize>().map_err(invalid_data)?);
}
}
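The change above makes header matching case-insensitive: the LSP base protocol follows HTTP header conventions, where header names are not case-sensitive, so a client may legitimately send `content-length`. A tiny standalone illustration:

```rust
// Sketch of the case-insensitive comparison used above.
fn is_content_length(header_name: &str) -> bool {
    header_name.eq_ignore_ascii_case("Content-Length")
}

fn main() {
    assert!(is_content_length("Content-Length"));
    assert!(is_content_length("content-length"));
    assert!(!is_content_length("Content-Type"));
}
```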
diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml
index a910e012b..f0cd35399 100644
--- a/src/tools/rust-analyzer/triagebot.toml
+++ b/src/tools/rust-analyzer/triagebot.toml
@@ -9,3 +9,7 @@ allow-unauthenticated = [
[autolabel."S-waiting-on-review"]
new_pr = true
+
+[no-merges]
+exclude_labels = ["sync"]
+labels = ["has-merge-commits", "S-waiting-on-author"]
diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml
index b4b294c30..7a34617e2 100644
--- a/src/tools/rust-analyzer/xtask/Cargo.toml
+++ b/src/tools/rust-analyzer/xtask/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
publish = false
license = "MIT OR Apache-2.0"
edition = "2021"
-rust-version = "1.65"
+rust-version.workspace = true
[dependencies]
anyhow = "1.0.62"
diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs
index 210047970..7720ad69f 100644
--- a/src/tools/rust-analyzer/xtask/src/flags.rs
+++ b/src/tools/rust-analyzer/xtask/src/flags.rs
@@ -1,5 +1,7 @@
#![allow(unreachable_pub)]
+use std::str::FromStr;
+
use crate::install::{ClientOpt, Malloc, ServerOpt};
xflags::xflags! {
@@ -42,7 +44,7 @@ xflags::xflags! {
required changelog: String
}
cmd metrics {
- optional --dry-run
+ optional measurement_type: MeasurementType
}
/// Builds a benchmark version of rust-analyzer and puts it into `./target`.
cmd bb {
@@ -106,8 +108,31 @@ pub struct PublishReleaseNotes {
}
#[derive(Debug)]
+pub enum MeasurementType {
+ Build,
+ AnalyzeSelf,
+ AnalyzeRipgrep,
+ AnalyzeWebRender,
+ AnalyzeDiesel,
+}
+
+impl FromStr for MeasurementType {
+ type Err = String;
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "build" => Ok(Self::Build),
+ "self" => Ok(Self::AnalyzeSelf),
+ "ripgrep" => Ok(Self::AnalyzeRipgrep),
+ "webrender" => Ok(Self::AnalyzeWebRender),
+ "diesel" => Ok(Self::AnalyzeDiesel),
+ _ => Err("Invalid option".to_string()),
+ }
+ }
+}
+
+#[derive(Debug)]
pub struct Metrics {
- pub dry_run: bool,
+ pub measurement_type: Option<MeasurementType>,
}
#[derive(Debug)]
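
The `metrics` command now takes an optional positional argument parsed through this `FromStr` impl. A reduced, self-contained sketch of the same pattern (two variants instead of five; names here are illustrative):

```rust
// Standalone sketch of the FromStr-driven flag parsing shown above.
use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum Measurement {
    Build,
    AnalyzeRipgrep,
}

impl FromStr for Measurement {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "build" => Ok(Self::Build),
            "ripgrep" => Ok(Self::AnalyzeRipgrep),
            _ => Err("Invalid option".to_string()),
        }
    }
}

fn main() {
    assert_eq!("ripgrep".parse::<Measurement>(), Ok(Measurement::AnalyzeRipgrep));
    assert!("unknown".parse::<Measurement>().is_err());
}
```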
diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs
index 83223a551..e8c00c72e 100644
--- a/src/tools/rust-analyzer/xtask/src/install.rs
+++ b/src/tools/rust-analyzer/xtask/src/install.rs
@@ -2,13 +2,13 @@
use std::{env, path::PathBuf, str};
-use anyhow::{bail, format_err, Context, Result};
+use anyhow::{bail, format_err, Context};
use xshell::{cmd, Shell};
use crate::flags;
impl flags::Install {
- pub(crate) fn run(self, sh: &Shell) -> Result<()> {
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
if cfg!(target_os = "macos") {
fix_path_for_mac(sh).context("Fix path for mac")?;
}
@@ -39,7 +39,7 @@ pub(crate) enum Malloc {
Jemalloc,
}
-fn fix_path_for_mac(sh: &Shell) -> Result<()> {
+fn fix_path_for_mac(sh: &Shell) -> anyhow::Result<()> {
let mut vscode_path: Vec<PathBuf> = {
const COMMON_APP_PATH: &str =
r"/Applications/Visual Studio Code.app/Contents/Resources/app/bin";
@@ -68,7 +68,7 @@ fn fix_path_for_mac(sh: &Shell) -> Result<()> {
Ok(())
}
-fn install_client(sh: &Shell, client_opt: ClientOpt) -> Result<()> {
+fn install_client(sh: &Shell, client_opt: ClientOpt) -> anyhow::Result<()> {
let _dir = sh.push_dir("./editors/code");
// Package extension.
@@ -129,7 +129,7 @@ fn install_client(sh: &Shell, client_opt: ClientOpt) -> Result<()> {
Ok(())
}
-fn install_server(sh: &Shell, opts: ServerOpt) -> Result<()> {
+fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> {
let features = match opts.malloc {
Malloc::System => &[][..],
Malloc::Mimalloc => &["--features", "mimalloc"],
diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs
index b6f730dbf..685374231 100644
--- a/src/tools/rust-analyzer/xtask/src/metrics.rs
+++ b/src/tools/rust-analyzer/xtask/src/metrics.rs
@@ -1,6 +1,6 @@
use std::{
collections::BTreeMap,
- env, fs,
+ fs,
io::Write as _,
path::Path,
time::{Instant, SystemTime, UNIX_EPOCH},
@@ -9,16 +9,13 @@ use std::{
use anyhow::{bail, format_err};
use xshell::{cmd, Shell};
-use crate::flags;
+use crate::flags::{self, MeasurementType};
type Unit = String;
impl flags::Metrics {
pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
let mut metrics = Metrics::new(sh)?;
- if !self.dry_run {
- sh.remove_path("./target/release")?;
- }
if !Path::new("./target/rustc-perf").exists() {
sh.create_dir("./target/rustc-perf")?;
cmd!(sh, "git clone https://github.com/rust-lang/rustc-perf.git ./target/rustc-perf")
@@ -32,38 +29,47 @@ impl flags::Metrics {
let _env = sh.push_env("RA_METRICS", "1");
- {
- // https://github.com/rust-lang/rust-analyzer/issues/9997
- let _d = sh.push_dir("target/rustc-perf/collector/benchmarks/webrender");
- cmd!(sh, "cargo update -p url --precise 1.6.1").run()?;
- }
- metrics.measure_build(sh)?;
- metrics.measure_analysis_stats_self(sh)?;
- metrics.measure_analysis_stats(sh, "ripgrep")?;
- metrics.measure_analysis_stats(sh, "webrender")?;
- metrics.measure_analysis_stats(sh, "diesel/diesel")?;
-
- if !self.dry_run {
- let _d = sh.push_dir("target");
- let metrics_token = env::var("METRICS_TOKEN").unwrap();
- cmd!(
- sh,
- "git clone --depth 1 https://{metrics_token}@github.com/rust-analyzer/metrics.git"
- )
- .run()?;
-
- {
- let mut file =
- fs::File::options().append(true).open("target/metrics/metrics.json")?;
- writeln!(file, "{}", metrics.json())?;
+ let filename = match self.measurement_type {
+ Some(ms) => match ms {
+ MeasurementType::Build => {
+ metrics.measure_build(sh)?;
+ "build.json"
+ }
+ MeasurementType::AnalyzeSelf => {
+ metrics.measure_analysis_stats_self(sh)?;
+ "self.json"
+ }
+ MeasurementType::AnalyzeRipgrep => {
+ metrics.measure_analysis_stats(sh, "ripgrep")?;
+ "ripgrep.json"
+ }
+ MeasurementType::AnalyzeWebRender => {
+ {
+ // https://github.com/rust-lang/rust-analyzer/issues/9997
+ let _d = sh.push_dir("target/rustc-perf/collector/benchmarks/webrender");
+ cmd!(sh, "cargo update -p url --precise 1.6.1").run()?;
+ }
+ metrics.measure_analysis_stats(sh, "webrender")?;
+ "webrender.json"
+ }
+ MeasurementType::AnalyzeDiesel => {
+ metrics.measure_analysis_stats(sh, "diesel/diesel")?;
+ "diesel.json"
+ }
+ },
+ None => {
+ metrics.measure_build(sh)?;
+ metrics.measure_analysis_stats_self(sh)?;
+ metrics.measure_analysis_stats(sh, "ripgrep")?;
+ metrics.measure_analysis_stats(sh, "webrender")?;
+ metrics.measure_analysis_stats(sh, "diesel/diesel")?;
+ "all.json"
}
+ };
- let _d = sh.push_dir("metrics");
- cmd!(sh, "git add .").run()?;
- cmd!(sh, "git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈")
- .run()?;
- cmd!(sh, "git push origin master").run()?;
- }
+ let mut file =
+ fs::File::options().write(true).create(true).open(format!("target/{}", filename))?;
+ writeln!(file, "{}", metrics.json())?;
eprintln!("{metrics:#?}");
Ok(())
}
diff --git a/src/tools/rust-analyzer/xtask/src/publish.rs b/src/tools/rust-analyzer/xtask/src/publish.rs
index cdb7d8fac..7faae9b20 100644
--- a/src/tools/rust-analyzer/xtask/src/publish.rs
+++ b/src/tools/rust-analyzer/xtask/src/publish.rs
@@ -1,12 +1,12 @@
mod notes;
use crate::flags;
-use anyhow::{anyhow, bail, Result};
+use anyhow::bail;
use std::env;
use xshell::{cmd, Shell};
impl flags::PublishReleaseNotes {
- pub(crate) fn run(self, sh: &Shell) -> Result<()> {
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
let asciidoc = sh.read_file(&self.changelog)?;
let mut markdown = notes::convert_asciidoc_to_markdown(std::io::Cursor::new(&asciidoc))?;
let file_name = check_file_name(self.changelog)?;
@@ -24,11 +24,11 @@ impl flags::PublishReleaseNotes {
}
}
-fn check_file_name<P: AsRef<std::path::Path>>(path: P) -> Result<String> {
+fn check_file_name<P: AsRef<std::path::Path>>(path: P) -> anyhow::Result<String> {
let file_name = path
.as_ref()
.file_name()
- .ok_or_else(|| anyhow!("file name is not specified as `changelog`"))?
+ .ok_or_else(|| anyhow::format_err!("file name is not specified as `changelog`"))?
.to_string_lossy();
let mut chars = file_name.chars();
@@ -61,7 +61,7 @@ fn create_original_changelog_url(file_name: &str) -> String {
format!("https://rust-analyzer.github.io/thisweek/{year}/{month}/{day}/{stem}.html")
}
-fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()> {
+fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> anyhow::Result<()> {
let token = match env::var("GITHUB_TOKEN") {
Ok(token) => token,
Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."),
@@ -79,6 +79,7 @@ fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()>
let release_id = cmd!(sh, "jq .id").stdin(release_json).read()?;
let mut patch = String::new();
+ // note: the GitHub API doesn't update the target commit if the tag already exists
write_json::object(&mut patch)
.string("tag_name", tag_name)
.string("target_commitish", "master")